From bee23ba899dd285566d1816f7c1daca90cc26673 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Fri, 16 Jul 2021 11:18:27 +0200 Subject: [PATCH 001/314] fix: update to gatk4.2.0.0 (#576) --- modules/gatk4/samtofastq/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 00d5d359..486d6b5d 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -11,11 +11,11 @@ process GATK4_SAMTOFASTQ { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::gatk4=4.1.9.0' : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/gatk4:4.1.9.0--py39_0' + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" } else { - container 'quay.io/biocontainers/gatk4:4.1.9.0--py39_0' + container "quay.io/biocontainers/gatk4:4.2.0.0--0" } input: From 9fb21e1a84edfa5cbf459b5115682483cb6a2467 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Fri, 16 Jul 2021 11:22:01 +0200 Subject: [PATCH 002/314] fix: update to gatk4.2.0.0 (#577) --- modules/gatk4/revertsam/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index a3bf3004..2f4959db 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -11,11 +11,11 @@ process GATK4_REVERTSAM { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::gatk4=4.1.9.0' : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/gatk4:4.1.9.0--py39_0' + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" } else { - container 'quay.io/biocontainers/gatk4:4.1.9.0--py39_0' + container "quay.io/biocontainers/gatk4:4.2.0.0--0" } input: From e7be649029b97d12ddf33baec8e85e141f3ece1c Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Fri, 16 Jul 2021 11:23:08 +0200 Subject: [PATCH 003/314] fix: update to gatk4.2.0.0 (#578) --- modules/gatk4/mergevcfs/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index 9feb8187..5a80c9ff 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -11,11 +11,11 @@ process GATK4_MERGEVCFS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::gatk4=4.1.9.0' : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.0.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/gatk4:4.1.9.0--py39_0' + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" } else { - container 'quay.io/biocontainers/gatk4:4.1.9.0--py39_0' + container "quay.io/biocontainers/gatk4:4.2.0.0--0" } input: From c8877835c288eb32b2ad1810d38a78e83e791f3d Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Fri, 16 Jul 2021 11:24:29 +0200 Subject: [PATCH 004/314] fix: update to gatk4.2.0.0 (#579) --- modules/gatk4/createsequencedictionary/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 3cf5543a..0276e8b2 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -11,11 +11,11 @@ process GATK4_CREATESEQUENCEDICTIONARY { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::gatk4=4.1.9.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.1.9.0--py39_0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" } else { - container "quay.io/biocontainers/gatk4:4.1.9.0--py39_0" + container "quay.io/biocontainers/gatk4:4.2.0.0--0" } input: From d624336cc1d219ff96d3cf00ab5933d2276bbdac Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Fri, 16 Jul 2021 11:25:22 +0200 Subject: [PATCH 005/314] fix: update to gatk4.2.0.0 (#580) --- modules/gatk4/bedtointervallist/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 6d98e1d6..af385f8f 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -11,11 +11,11 @@ process GATK4_BEDTOINTERVALLIST { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::gatk4=4.1.9.0' : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/gatk4:4.1.9.0--py39_0' + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" } else { - container 'quay.io/biocontainers/gatk4:4.1.9.0--py39_0' + container "quay.io/biocontainers/gatk4:4.2.0.0--0" } input: From b3c466773207002060efbeff384b6f03ec0d7d9f Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Fri, 16 Jul 2021 11:26:28 +0200 Subject: [PATCH 006/314] fix: update to gatk4.2.0.0 (#575) * fix: update to gatk4.2.0.0 * fix: update md5sum --- modules/gatk4/splitncigarreads/main.nf | 6 +++--- tests/modules/gatk4/splitncigarreads/test.yml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 5c7a6ca8..a8724e2e 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -11,11 +11,11 @@ process GATK4_SPLITNCIGARREADS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::gatk4=4.1.9.0' : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/gatk4:4.1.9.0--py39_0' + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" } else { - container 'quay.io/biocontainers/gatk4:4.1.9.0--py39_0' + container "quay.io/biocontainers/gatk4:4.2.0.0--0" } input: diff --git a/tests/modules/gatk4/splitncigarreads/test.yml b/tests/modules/gatk4/splitncigarreads/test.yml index c2eb2b50..d6827db9 100644 --- a/tests/modules/gatk4/splitncigarreads/test.yml +++ b/tests/modules/gatk4/splitncigarreads/test.yml @@ -5,4 +5,4 @@ - gatk4/splitncigarreads files: - path: output/gatk4/test.bam - md5sum: 900af0f67749d5ffac961354178ecb29 + md5sum: 8d05a41f9467e62d3fc1bc725f0869ec From 65ab646870c4ff74322b4fff22ad51b190791a2c Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Fri, 16 Jul 2021 11:54:21 +0200 Subject: [PATCH 007/314] fix: update seqkit to 0.16.0 (#581) --- modules/seqkit/split2/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index ec7057f7..4c516c93 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -12,12 +12,12 @@ process SEQKIT_SPLIT2 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::seqkit=0.15.0" : null) + conda (params.enable_conda ? "bioconda::seqkit=0.16.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqkit:0.15.0--0" + container "https://depot.galaxyproject.org/singularity/seqkit:0.16.0--h9ee0642_0" } else { - container "quay.io/biocontainers/seqkit:0.15.0--0" + container "quay.io/biocontainers/seqkit:0.16.0--h9ee0642_0" } input: From adfb8a3d6541f363aec83df16df873cbb767b388 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Fri, 16 Jul 2021 14:32:17 +0200 Subject: [PATCH 008/314] fix: update samtools_merge (#582) * fix: update samtools_merge * Update modules/samtools/merge/main.nf Co-authored-by: Harshil Patel Co-authored-by: Harshil Patel --- modules/samtools/merge/main.nf | 8 ++++---- tests/modules/samtools/merge/main.nf | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 81b2828c..23b31e2f 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -22,14 +22,14 @@ process SAMTOOLS_MERGE { tuple val(meta), path(bams) output: - tuple val(meta), path("*merged.bam"), emit: merged_bam - path "*.version.txt" , emit: version + tuple val(meta), path("${prefix}.bam"), emit: bam + path "*.version.txt" , emit: version script: def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ - samtools merge ${prefix}_merged.bam $bams + samtools merge ${prefix}.bam $bams echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt """ } diff --git a/tests/modules/samtools/merge/main.nf b/tests/modules/samtools/merge/main.nf index 75ba886b..a4511a34 100644 --- a/tests/modules/samtools/merge/main.nf +++ b/tests/modules/samtools/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_MERGE } from '../../../../modules/samtools/merge/main.nf' addParams( options: [:] ) +include { SAMTOOLS_MERGE } from '../../../../modules/samtools/merge/main.nf' addParams( options: [suffix:'_merged'] ) workflow test_samtools_merge { input = [ [ id: 'test' ], // meta map From 2ade120249b92e382f964bd75701befa2c2dc75d Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 16 Jul 2021 14:21:08 +0100 Subject: [PATCH 009/314] Add --paired flag to umitools dedup and bump to 1.1.2 (#583) * Add --paired flag to umitools dedup and bump to 1.1.2 * paired not paired_end * Fix ECLint --- modules/umitools/dedup/main.nf | 14 +++++++------- modules/umitools/extract/main.nf | 10 +++++----- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index 93ea6d45..f30ab164 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -11,11 +11,11 @@ process UMITOOLS_DEDUP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::umi_tools=1.1.1" : null) + conda (params.enable_conda ? "bioconda::umi_tools=1.1.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.1--py38h0213d0e_1" + container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0" } else { - container "quay.io/biocontainers/umi_tools:1.1.1--py38h0213d0e_1" + container "quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0" } input: @@ -23,19 +23,19 @@ process UMITOOLS_DEDUP { output: tuple val(meta), path("*.bam"), emit: bam - tuple val(meta), path("*.tsv"), emit: tsv path "*.version.txt" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def paired = meta.single_end ? 
"" : "--paired" """ umi_tools dedup \\ -I $bam \\ -S ${prefix}.bam \\ - --output-stats=$prefix \\ - $options.args \\ + $paired \\ + $options.args - umi_tools --version | sed -e "s/UMI-tools version: //g" > ${software}.version.txt + echo \$(umi_tools --version 2>&1) | sed 's/^.*UMI-tools version://; s/ *\$//' > ${software}.version.txt """ } diff --git a/modules/umitools/extract/main.nf b/modules/umitools/extract/main.nf index 581f41c3..e5c4e21c 100644 --- a/modules/umitools/extract/main.nf +++ b/modules/umitools/extract/main.nf @@ -11,11 +11,11 @@ process UMITOOLS_EXTRACT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::umi_tools=1.1.1" : null) + conda (params.enable_conda ? "bioconda::umi_tools=1.1.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.1--py38h0213d0e_1" + container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0" } else { - container "quay.io/biocontainers/umi_tools:1.1.1--py38h0213d0e_1" + container "quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0" } input: @@ -38,7 +38,7 @@ process UMITOOLS_EXTRACT { $options.args \\ > ${prefix}.umi_extract.log - umi_tools --version | sed -e "s/UMI-tools version: //g" > ${software}.version.txt + echo \$(umi_tools --version 2>&1) | sed 's/^.*UMI-tools version://; s/ *\$//' > ${software}.version.txt """ } else { """ @@ -51,7 +51,7 @@ process UMITOOLS_EXTRACT { $options.args \\ > ${prefix}.umi_extract.log - umi_tools --version | sed -e "s/UMI-tools version: //g" > ${software}.version.txt + echo \$(umi_tools --version 2>&1) | sed 's/^.*UMI-tools version://; s/ *\$//' > ${software}.version.txt """ } } From 8c294882d72940e7cce9b17ed9868a892b7c7d5c Mon Sep 17 00:00:00 2001 From: Johnathan D <28043284+bjohnnyd@users.noreply.github.com> Date: Mon, 19 Jul 2021 08:53:27 +0100 Subject: [PATCH 010/314] fix: lofreq/call `test.yml` md5sum (#587) --- tests/modules/lofreq/call/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/lofreq/call/test.yml b/tests/modules/lofreq/call/test.yml index a809b2da..88700bfe 100644 --- a/tests/modules/lofreq/call/test.yml +++ b/tests/modules/lofreq/call/test.yml @@ -5,4 +5,4 @@ - lofreq/call files: - path: output/lofreq/test.vcf.gz - md5sum: 421b407a172191e54d054018c8868cf7 + contains: ['##INFO='] From 0b40798d1b59a28bd86aa5b558a0793614be1efa Mon Sep 17 00:00:00 2001 From: Johnathan D <28043284+bjohnnyd@users.noreply.github.com> Date: Mon, 19 Jul 2021 09:19:27 +0100 Subject: [PATCH 011/314] fix: lofreq/callparallel missing arg (#562) (#586) Co-authored-by: Harshil Patel --- modules/lofreq/callparallel/main.nf | 13 +++++++------ modules/lofreq/callparallel/meta.yml | 1 + tests/modules/lofreq/callparallel/test.yml | 9 +++++---- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index 9ebb2805..4392c700 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -20,12 +20,12 @@ process LOFREQ_CALLPARALLEL { input: tuple val(meta), path(bam), path(bai) - file fasta - file fai + path fasta + path fai output: - tuple val(meta), path("*.vcf"), emit: vcf - path "*.version.txt" , emit: version + tuple val(meta), path("*.vcf.gz"), emit: vcf + 
path "*.version.txt" , emit: version script: def software = getSoftwareName(task.process) @@ -34,10 +34,11 @@ process LOFREQ_CALLPARALLEL { lofreq \\ call-parallel \\ --pp-threads $task.cpus \\ + $options.args \\ -f $fasta \\ - -o ${prefix}.vcf \\ + -o ${prefix}.vcf.gz \\ $bam - echo \$(lofreq version 2>&1) | sed 's/^.*lofreq //; s/Using.*\$//' > ${software}.version.txt + echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//' > ${software}.version.txt """ } diff --git a/modules/lofreq/callparallel/meta.yml b/modules/lofreq/callparallel/meta.yml index 43756e2b..3154f412 100644 --- a/modules/lofreq/callparallel/meta.yml +++ b/modules/lofreq/callparallel/meta.yml @@ -51,3 +51,4 @@ output: authors: - "@kaurravneet4123" + - "@bjohnnyd" diff --git a/tests/modules/lofreq/callparallel/test.yml b/tests/modules/lofreq/callparallel/test.yml index 3ffb459e..e09f68c3 100644 --- a/tests/modules/lofreq/callparallel/test.yml +++ b/tests/modules/lofreq/callparallel/test.yml @@ -1,7 +1,8 @@ -- name: lofreq callparallel - command: nextflow run ./tests/modules/lofreq/callparallel -entry test_lofreq_callparallel -c tests/config/nextflow.config +- name: lofreq callparallel test_lofreq_callparallel + command: nextflow run tests/modules/lofreq/callparallel -entry test_lofreq_callparallel -c tests/config/nextflow.config tags: - - lofreq - lofreq/callparallel + - lofreq files: - - path: output/lofreq/test.vcf + - path: output/lofreq/test.vcf.gz + contains: ['##INFO='] From 5dd049047d01e72c01a519422f17e203bca343ac Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 20 Jul 2021 11:27:47 +0100 Subject: [PATCH 012/314] Fix tyop in output for rseqc/junctionannotation (#592) --- modules/rseqc/junctionannotation/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index 909ee9ae..ace4fe61 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -27,7 +27,7 @@ process RSEQC_JUNCTIONANNOTATION { tuple val(meta), path("*.r") , emit: rscript tuple val(meta), path("*.log") , emit: log tuple val(meta), path("*.junction.bed"), optional:true, emit: bed - tuple val(meta), path("*.interact.bed"), optional:true, emit: interact_bed + tuple val(meta), path("*.Interact.bed"), optional:true, emit: interact_bed tuple val(meta), path("*junction.pdf") , optional:true, emit: pdf tuple val(meta), path("*events.pdf") , optional:true, emit: events_pdf path "*.version.txt" , emit: version From e7e30b6da631ce5288151af4e46488ac6d294ff4 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 20 Jul 2021 12:25:36 +0100 Subject: [PATCH 013/314] Bump Pangolin to 3.1.7 (#593) * Bump Pangolin to 3.1.7 * Update md5sum * Update README * Re-word * Re-word again * Use channels --- README.md | 4 +++- modules/pangolin/main.nf | 6 +++--- tests/modules/pangolin/test.yml | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 59857849..9e06162c 100644 --- a/README.md +++ b/README.md @@ -395,6 +395,8 @@ The key words "MUST", "MUST NOT", "SHOULD", etc. are to be interpreted as descri #### General +- All non-mandatory command-line tool options MUST be provided as a string i.e. `options.args` where `options` is a Groovy Map that MUST be provided via the Nextflow `addParams` option when including the module via `include` in the parent workflow. 
+ - Software that can be piped together SHOULD be added to separate module files unless there is a run-time, storage advantage in implementing in this way. For example, using a combination of `bwa` and `samtools` to output a BAM file instead of a SAM file: @@ -431,7 +433,7 @@ using a combination of `bwa` and `samtools` to output a BAM file instead of a SA - A module file SHOULD only define input and output files as command-line parameters to be executed within the process. -- All other parameters MUST be provided as a string i.e. `options.args` where `options` is a Groovy Map that MUST be provided via the Nextflow `addParams` option when including the module via `include` in the parent workflow. +- All `params` within the module MUST be initialised and used in the local context of the module. In other words, named `params` defined in the parent workflow MUST NOT be assumed to be passed to the module to allow developers to call their parameters whatever they want. In general, it may be more suitable to use additional `input` value channels to cater for such scenarios. - If the tool supports multi-threading then you MUST provide the appropriate parameter using the Nextflow `task` variable e.g. `--threads $task.cpus`. diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index 7be9311c..d1417990 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -11,11 +11,11 @@ process PANGOLIN { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::pangolin=3.0.5' : null) + conda (params.enable_conda ? 'bioconda::pangolin=3.1.7' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/pangolin:3.0.5--pyhdfd78af_0' + container 'https://depot.galaxyproject.org/singularity/pangolin:3.1.7--pyhdfd78af_0' } else { - container 'quay.io/biocontainers/pangolin:3.0.5--pyhdfd78af_0' + container 'quay.io/biocontainers/pangolin:3.1.7--pyhdfd78af_0' } input: diff --git a/tests/modules/pangolin/test.yml b/tests/modules/pangolin/test.yml index debee708..2b6b7553 100644 --- a/tests/modules/pangolin/test.yml +++ b/tests/modules/pangolin/test.yml @@ -4,4 +4,4 @@ - pangolin files: - path: ./output/pangolin/test.pangolin.csv - md5sum: 02d916f18095694a7641ebc29fecaeae + md5sum: 8daea6ca9fee7b747080d4d2b28a83d7 From 2e619add8758d422942399db0f89d54ea3c1192e Mon Sep 17 00:00:00 2001 From: Johnathan D <28043284+bjohnnyd@users.noreply.github.com> Date: Tue, 20 Jul 2021 21:31:31 +0100 Subject: [PATCH 014/314] Add samtools ampliconclip (#590) * created template for `samtools/ampliconclip` (#584) * All tests passing (#584) * Linting fixed (#584) * Final linting fixed (#584) * Optional output flags moved to `input` (#584) * typo fix (#584) * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/samtools/ampliconclip/functions.nf | 68 ++++++++++++++++++++ modules/samtools/ampliconclip/main.nf | 51 +++++++++++++++ modules/samtools/ampliconclip/meta.yml | 64 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/samtools/ampliconclip/main.nf | 44 +++++++++++++ tests/modules/samtools/ampliconclip/test.yml | 34 ++++++++++ 6 files changed, 265 insertions(+) create mode 100644 modules/samtools/ampliconclip/functions.nf create mode 100644 modules/samtools/ampliconclip/main.nf create mode 100644 
modules/samtools/ampliconclip/meta.yml create mode 100644 tests/modules/samtools/ampliconclip/main.nf create mode 100644 tests/modules/samtools/ampliconclip/test.yml diff --git a/modules/samtools/ampliconclip/functions.nf b/modules/samtools/ampliconclip/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/samtools/ampliconclip/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf new file mode 100644 index 00000000..6ec27ccf --- /dev/null +++ b/modules/samtools/ampliconclip/main.nf @@ -0,0 +1,51 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SAMTOOLS_AMPLICONCLIP { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::samtools=1.13" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + } else { + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + } + + input: + tuple val(meta), path(bam) + path bed + val save_cliprejects + val save_clipstats + + output: + tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*.clipstats.txt") , optional:true, emit: stats + tuple val(meta), path("*.cliprejects.bam"), optional:true, emit: rejects_bam + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def rejects = save_cliprejects ? "--rejects-file ${prefix}.cliprejects.bam" : "" + def stats = save_clipstats ? "-f ${prefix}.clipstats.txt" : "" + """ + samtools \\ + ampliconclip \\ + $options.args \\ + -@ $task.cpus \\ + $rejects \\ + $stats \\ + -b $bed \\ + -o ${prefix}.bam \\ + $bam + + echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + """ +} diff --git a/modules/samtools/ampliconclip/meta.yml b/modules/samtools/ampliconclip/meta.yml new file mode 100644 index 00000000..fce06986 --- /dev/null +++ b/modules/samtools/ampliconclip/meta.yml @@ -0,0 +1,64 @@ +name: samtools_ampliconclip +description: write your description here +keywords: + - amplicon + - clipping + - ampliconclip + - samtools ampliconclip + - samtools +tools: + - samtools: + description: | + SAMtools is a set of utilities for interacting with and post-processing + short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. + These files are generated as output by short read aligners like BWA. + homepage: http://www.htslib.org/ + documentation: hhttp://www.htslib.org/doc/samtools.html + doi: 10.1093/bioinformatics/btp352 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - bed: + type: file + description: BED file of regions to be removed (e.g. amplicon primers) + pattern: "*.{bed}" + - save_cliprejects: + type: value + description: Save filtered reads to a file + - save_clipstats: + type: value + description: Save clipping stats to a file + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bam: + type: file + description: Clipped reads BAM file + pattern: "*.{bam}" + - stats: + type: file + description: Clipping statistics text file + pattern: "*.{clipstats.txt}" + - rejects_bam: + type: file + description: Filtered reads BAM file + pattern: "*.{cliprejects.bam}" + +authors: + - "@bjohnnyd" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a839d410..92748eca 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -651,6 +651,10 @@ salmon/quant: - modules/salmon/quant/** - tests/modules/salmon/quant/** +samtools/ampliconclip: + - modules/samtools/ampliconclip/** + - tests/modules/samtools/ampliconclip/** + samtools/faidx: - modules/samtools/faidx/** - tests/modules/samtools/faidx/** diff --git a/tests/modules/samtools/ampliconclip/main.nf b/tests/modules/samtools/ampliconclip/main.nf new file mode 100644 index 00000000..a8d8609f --- /dev/null +++ b/tests/modules/samtools/ampliconclip/main.nf @@ -0,0 +1,44 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMTOOLS_AMPLICONCLIP } from '../../../../modules/samtools/ampliconclip/main.nf' addParams([:]) + +workflow test_samtools_ampliconclip_no_stats_no_rejects { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + bed = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + save_cliprejects = false + save_clipstats = false + + SAMTOOLS_AMPLICONCLIP ( input, bed, save_cliprejects, save_clipstats ) +} + +workflow test_samtools_ampliconclip_no_stats_with_rejects { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + bed = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + save_cliprejects = true + save_clipstats = false + + SAMTOOLS_AMPLICONCLIP ( input, bed, save_cliprejects, save_clipstats ) +} + +workflow test_samtools_ampliconclip_with_stats_with_rejects { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + bed = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + save_cliprejects = true + save_clipstats = true + + SAMTOOLS_AMPLICONCLIP ( input, bed, save_cliprejects, save_clipstats ) +} diff --git a/tests/modules/samtools/ampliconclip/test.yml b/tests/modules/samtools/ampliconclip/test.yml new file mode 100644 index 00000000..9e41ce5b --- /dev/null +++ b/tests/modules/samtools/ampliconclip/test.yml @@ -0,0 +1,34 @@ +## TODO nf-core: Please run the following command to build this file: +# nf-core modules create-test-yml samtools/ampliconclip +- name: samtools ampliconclip no stats no rejects + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_no_rejects -c tests/config/nextflow.config + tags: + - samtools + - samtools/ampliconclip + files: + - path: output/samtools/test.bam + md5sum: 1c705ebe39f68f1dac164733ae99c9d2 + +- name: samtools ampliconclip no stats with rejects + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c tests/config/nextflow.config + tags: + - samtools + - samtools/ampliconclip + files: + - path: 
output/samtools/test.bam + md5sum: 86c7bfb5378d57b16855c5b399000b2a + - path: output/samtools/test.cliprejects.bam + md5sum: 8e2eea2c0005b4d4e77c0eb549599133 + +- name: samtools ampliconclip with stats with rejects + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c tests/config/nextflow.config + tags: + - samtools + - samtools/ampliconclip + files: + - path: output/samtools/test.bam + md5sum: d96f5eebef0ff4635e68090e89756d4a + - path: output/samtools/test.cliprejects.bam + md5sum: ad83a523d6ff1c58caade4ddafbaaed7 + - path: output/samtools/test.clipstats.txt + md5sum: 6fbde83d658cd2813b79900d33800d1d From e4df3362941eadb5f53c4a0774c83b50a647a4a9 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 20 Jul 2021 21:50:22 +0100 Subject: [PATCH 015/314] Add hifiasm module (#596) * Add hifiasm module * Tidy up module * Add pacbio files to test_data.config --- modules/hifiasm/functions.nf | 68 ++++++++++++++++++++++++++ modules/hifiasm/main.nf | 65 ++++++++++++++++++++++++ modules/hifiasm/meta.yml | 87 +++++++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 6 +++ tests/modules/hifiasm/main.nf | 31 ++++++++++++ tests/modules/hifiasm/test.yml | 33 +++++++++++++ 7 files changed, 294 insertions(+) create mode 100644 modules/hifiasm/functions.nf create mode 100644 modules/hifiasm/main.nf create mode 100644 modules/hifiasm/meta.yml create mode 100644 tests/modules/hifiasm/main.nf create mode 100644 tests/modules/hifiasm/test.yml diff --git a/modules/hifiasm/functions.nf b/modules/hifiasm/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/hifiasm/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/hifiasm/main.nf b/modules/hifiasm/main.nf new file mode 100644 index 00000000..5d005ee4 --- /dev/null +++ b/modules/hifiasm/main.nf @@ -0,0 +1,65 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process HIFIASM { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::hifiasm=0.15.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/hifiasm:0.15.4--h2e03b76_0" + } else { + container "quay.io/biocontainers/hifiasm:0.15.4--h2e03b76_0" + } + + input: + tuple val(meta), path(reads) + path paternal_kmer_dump + path maternal_kmer_dump + val use_parental_kmers + + output: + tuple val(meta), path("*.r_utg.gfa") , emit: raw_unitigs + tuple val(meta), path("*.ec.bin") , emit: corrected_reads + tuple val(meta), path("*.ovlp.source.bin") , emit: source_overlaps + tuple val(meta), path("*.ovlp.reverse.bin"), emit: reverse_overlaps + tuple val(meta), path("*.p_utg.gfa") , emit: processed_unitigs, optional: true + tuple val(meta), path("*.asm.p_ctg.gfa") , emit: primary_contigs , optional: true + tuple val(meta), path("*.asm.a_ctg.gfa") , emit: alternate_contigs, optional: true + tuple val(meta), path("*.hap1.p_ctg.gfa") , emit: paternal_contigs , optional: true + tuple val(meta), path("*.hap2.p_ctg.gfa") , emit: maternal_contigs , optional: true + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + if (use_parental_kmers) { + """ + hifiasm \\ + $options.args \\ + -o ${prefix}.asm \\ + -t $task.cpus \\ + -1 $paternal_kmer_dump \\ + -2 $maternal_kmer_dump \\ + $reads + + echo \$(hifiasm --version 2>&1) > ${software}.version.txt + """ + } else { // Phasing with Hi-C data is not supported yet + """ + hifiasm \\ + $options.args \\ + -o ${prefix}.asm \\ + -t $task.cpus \\ + $reads + + echo \$(hifiasm --version 2>&1) > ${software}.version.txt + """ + } +} diff --git a/modules/hifiasm/meta.yml b/modules/hifiasm/meta.yml new file mode 100644 index 00000000..dc414b93 --- /dev/null +++ b/modules/hifiasm/meta.yml @@ -0,0 +1,87 @@ +name: hifiasm +description: Whole-genome assembly using PacBio HiFi reads +keywords: + - genome assembly + - haplotype resolution + - phasing + - PacBio + - HiFi + - long reads +tools: + - hifiasm: + description: Haplotype-resolved assembler for accurate HiFi reads + homepage: https://github.com/chhylp123/hifiasm + documentation: https://github.com/chhylp123/hifiasm + tool_dev_url: https://github.com/chhylp123/hifiasm + doi: "10.1038/s41592-020-01056-5" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ file with PacBio HiFi reads + pattern: "*.{fastq}" + - paternal_kmer_dump: + type: file + description: Yak kmer dump file for paternal reads (can be used for haplotype resolution). It can have an arbitrary extension. + - maternal_kmer_dump: + type: file + description: Yak kmer dump file for maternal reads (can be used for haplotype resolution). It can have an arbitrary extension. + - use_parental_kmers: + type: logical + description: A flag (true or false) signalling if the module should use the paternal and maternal kmer dumps. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - raw_unitigs: + type: file + description: Raw unitigs + pattern: "*.r_utg.gfa" + - processed_unitigs: + type: file + description: Processed unitigs + pattern: "*.p_utg.gfa" + - primary_contigs: + type: file + description: Primary contigs + pattern: "*.asm.p_ctg.gfa" + - alternate_contigs: + type: file + description: Alternative contigs + pattern: "*.asm.a_ctg.gfa" + - paternal_contigs: + type: file + description: Paternal contigs + pattern: "*.hap1.p_ctg.gfa" + - maternal_contigs: + type: file + description: Maternal contigs + pattern: "*.hap2.p_ctg.gfa" + - corrected_reads: + type: file + description: Corrected reads + pattern: "*.ec.bin" + - source_overlaps: + type: file + description: Source overlaps + pattern: "*.ovlp.source.bin" + - reverse_overlaps: + type: file + description: Reverse overlaps + pattern: "*.ovlp.reverse.bin" + +authors: + - "@sidorov-si" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 92748eca..d56a8695 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -346,6 +346,10 @@ gunzip: - modules/gunzip/** - tests/modules/gunzip/** +hifiasm: + - modules/hifiasm/** + - tests/modules/hifiasm/** + hisat2/align: - modules/hisat2/align/** - modules/hisat2/build/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 314a2329..7b6e3cfd 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -158,6 +158,12 @@ params { test_10x_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_1.fastq.gz" test_10x_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_2.fastq.gz" + + test_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test.yak" + test2_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test2.yak" + } + 'pacbio' { + test_hifi_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/pacbio/fastq/test_hifi.fastq.gz" } } } diff --git a/tests/modules/hifiasm/main.nf b/tests/modules/hifiasm/main.nf new file mode 100644 index 00000000..aeb64fb2 --- /dev/null +++ b/tests/modules/hifiasm/main.nf @@ -0,0 +1,31 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HIFIASM } from '../../../modules/hifiasm/main.nf' addParams( options: [args:'-f0'] ) + +/* + * Test with long reads only + */ +workflow test_hifiasm_hifi_only { + input = [ + [ id:'test' ], // meta map + [ file(params.test_data['homo_sapiens']['pacbio']['test_hifi_fastq_gz'], checkIfExists: true) ] + ] + + HIFIASM ( input, [], [], false ) +} + +/* + * Test with parental reads for phasing + */ +workflow test_hifiasm_with_parental_reads { + input = [ + [ id:'test' ], // meta map + [ file(params.test_data['homo_sapiens']['pacbio']['test_hifi_fastq_gz'], checkIfExists: true) ] + ] + paternal_kmer_dump = file(params.test_data['homo_sapiens']['illumina']['test_yak'], checkIfExists: true) + maternal_kmer_dump = file(params.test_data['homo_sapiens']['illumina']['test2_yak'], checkIfExists: true) + + HIFIASM ( input, paternal_kmer_dump, maternal_kmer_dump, true ) +} diff --git a/tests/modules/hifiasm/test.yml b/tests/modules/hifiasm/test.yml new file mode 100644 index 00000000..47d9e38f --- /dev/null +++ b/tests/modules/hifiasm/test.yml @@ -0,0 +1,33 @@ +- name: hifiasm test_hifiasm_hifi_only + command: nextflow run tests/modules/hifiasm -entry test_hifiasm_hifi_only -c 
tests/config/nextflow.config + tags: + - hifiasm + files: + - path: output/hifiasm/test.asm.bp.hap1.p_ctg.gfa + md5sum: 73b0c1b01d445db91c269034b8660501 + - path: output/hifiasm/test.asm.bp.hap2.p_ctg.gfa + md5sum: 76847b7835185ab92611a820467c0066 + - path: output/hifiasm/test.asm.bp.p_utg.gfa + md5sum: 97446fb95de214835c36b10a90838486 + - path: output/hifiasm/test.asm.bp.r_utg.gfa + md5sum: 97446fb95de214835c36b10a90838486 + - path: output/hifiasm/test.asm.ec.bin + - path: output/hifiasm/test.asm.ovlp.reverse.bin + - path: output/hifiasm/test.asm.ovlp.source.bin + +- name: hifiasm test_hifiasm_with_parental_reads + command: nextflow run tests/modules/hifiasm -entry test_hifiasm_with_parental_reads -c tests/config/nextflow.config + tags: + - hifiasm + files: + - path: output/hifiasm/test.asm.dip.hap1.p_ctg.gfa + md5sum: 16907b1aea2081884deb9d039dd14038 + - path: output/hifiasm/test.asm.dip.hap2.p_ctg.gfa + md5sum: d283479bf72a31d0cc34f880535d0bd4 + - path: output/hifiasm/test.asm.dip.p_utg.gfa + md5sum: 97446fb95de214835c36b10a90838486 + - path: output/hifiasm/test.asm.dip.r_utg.gfa + md5sum: 97446fb95de214835c36b10a90838486 + - path: output/hifiasm/test.asm.ec.bin + - path: output/hifiasm/test.asm.ovlp.reverse.bin + - path: output/hifiasm/test.asm.ovlp.source.bin From 10502399ad50e061afe443a872fb49722af57e34 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 21 Jul 2021 10:00:48 +0200 Subject: [PATCH 016/314] Replaced param with input val channel (#595) * Replaced param with input val channel * Apply suggestions from code review Co-authored-by: Jose Espinosa-Carrasco Co-authored-by: Harshil Patel * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review (missed one) * YAML lint Co-authored-by: Jose Espinosa-Carrasco Co-authored-by: Harshil Patel --- modules/fastp/main.nf | 5 ++-- tests/modules/fastp/main.nf | 30 ++++++++++++++++++++++-- tests/modules/fastp/test.yml | 45 ++++++++++++++++++++++++++++++++++++ 3 files changed, 76 insertions(+), 4 deletions(-) diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index 6d703615..acba864a 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -20,6 +20,7 @@ process FASTP { input: tuple val(meta), path(reads) + val save_trimmed_fail output: tuple val(meta), path('*.trim.fastq.gz'), emit: reads @@ -34,7 +35,7 @@ process FASTP { def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { - def fail_fastq = params.save_trimmed_fail ? "--failed_out ${prefix}.fail.fastq.gz" : '' + def fail_fastq = save_trimmed_fail ? "--failed_out ${prefix}.fail.fastq.gz" : '' """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz fastp \\ @@ -49,7 +50,7 @@ process FASTP { echo \$(fastp --version 2>&1) | sed -e "s/fastp //g" > ${software}.version.txt """ } else { - def fail_fastq = params.save_trimmed_fail ? "--unpaired1 ${prefix}_1.fail.fastq.gz --unpaired2 ${prefix}_2.fail.fastq.gz" : '' + def fail_fastq = save_trimmed_fail ? "--unpaired1 ${prefix}_1.fail.fastq.gz --unpaired2 ${prefix}_2.fail.fastq.gz" : '' """ [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! 
-f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz diff --git a/tests/modules/fastp/main.nf b/tests/modules/fastp/main.nf index 012f4c6c..f4129c09 100644 --- a/tests/modules/fastp/main.nf +++ b/tests/modules/fastp/main.nf @@ -11,8 +11,9 @@ workflow test_fastp_single_end { input = [ [ id:'test', single_end:true ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] + save_trimmed_fail = false - FASTP ( input ) + FASTP ( input, save_trimmed_fail ) } // @@ -23,7 +24,32 @@ workflow test_fastp_paired_end { [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] + save_trimmed_fail = false - FASTP ( input ) + FASTP ( input, save_trimmed_fail ) } +// +// Test with single-end data with saving trimming fails +// +workflow test_fastp_single_end_trim_fail { + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + save_trimmed_fail = true + + FASTP ( input, save_trimmed_fail ) +} + +// +// Test with paired-end data with saving trimming fails +// +workflow test_fastp_paired_end_trim_fail { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + save_trimmed_fail = true + + FASTP ( input, save_trimmed_fail ) +} diff --git a/tests/modules/fastp/test.yml b/tests/modules/fastp/test.yml index eb9de964..a6e253af 100644 --- a/tests/modules/fastp/test.yml +++ b/tests/modules/fastp/test.yml @@ -36,3 +36,48 @@ md5sum: e2257263668dc8a75d95475099fb472d - path: output/fastp/test_2.trim.fastq.gz md5sum: 9eff7203596580cc5e42aceab4a469df + +- name: fastp test_fastp_single_end_trim_fail + command: nextflow run tests/modules/fastp -entry test_fastp_single_end_trim_fail -c tests/config/nextflow.config + tags: + - fastp + files: + - path: output/fastp/test.fastp.html + contains: + - "Q20 bases:12.922000 K (92.984097%)" + - "single end (151 cycles)" + - path: output/fastp/test.fastp.log + contains: + - "Q20 bases: 12922(92.9841%)" + - "reads passed filter: 99" + - path: output/fastp/test.trim.fastq.gz + md5sum: e2257263668dc8a75d95475099fb472d + - path: output/fastp/test.fastp.json + md5sum: ee65a46d6e59fa556f112727b8a902ce + - path: output/fastp/test.fail.fastq.gz + md5sum: de315d397c994d8e66bafc7a8dc11070 + +- name: fastp test_fastp_paired_end_trim_fail + command: nextflow run tests/modules/fastp -entry test_fastp_paired_end_trim_fail -c tests/config/nextflow.config + tags: + - fastp + files: + - path: output/fastp/test.fastp.html + contains: + - "Q20 bases:25.719000 K (93.033098%)" + - "The input has little adapter percentage (~0.000000%), probably it's trimmed before." 
+ - path: output/fastp/test.fastp.log + contains: + - "No adapter detected for read1" + - "Q30 bases: 12281(88.3716%)" + - path: output/fastp/test.fastp.json + contains: + - '"passed_filter_reads": 198' + - path: output/fastp/test_1.trim.fastq.gz + md5sum: e2257263668dc8a75d95475099fb472d + - path: output/fastp/test_2.trim.fastq.gz + md5sum: 9eff7203596580cc5e42aceab4a469df + - path: output/fastp/test_1.fail.fastq.gz + md5sum: e62ff0123a74adfc6903d59a449cbdb0 + - path: output/fastp/test_2.fail.fastq.gz + md5sum: f52309b35a7c15cbd56a9c3906ef98a5 From f892f273fe55f545b2f851e772c0999f49d9a798 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 21 Jul 2021 12:03:49 +0200 Subject: [PATCH 017/314] Updated optional input definition (#599) * Updated optional input definition * Update README.md Co-authored-by: Harshil Patel Co-authored-by: Harshil Patel --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9e06162c..2e78d970 100644 --- a/README.md +++ b/README.md @@ -443,7 +443,7 @@ using a combination of `bwa` and `samtools` to output a BAM file instead of a SA - Named file extensions MUST be emitted for ALL output channels e.g. `path "*.txt", emit: txt`. -- Optional inputs are not currently supported by Nextflow. However, "fake files" MAY be used to work around this issue. +- Optional inputs are not currently supported by Nextflow. However, passing an empty list (`[]`) instead of a file as a module parameter can be used to work around this issue. #### Resource requirements From 1a26d48104a94089f1012630790a17736ffb5546 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 21 Jul 2021 13:48:07 +0200 Subject: [PATCH 018/314] module: picard sortsam (#603) * Add picard/sortsam module * Fix container links * Changes after code review * Input meta in the right place --- modules/picard/sortsam/functions.nf | 68 +++++++++++++++++++++++++++ modules/picard/sortsam/main.nf | 49 +++++++++++++++++++ modules/picard/sortsam/meta.yml | 47 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/picard/sortsam/main.nf | 14 ++++++ tests/modules/picard/sortsam/test.yml | 8 ++++ 6 files changed, 190 insertions(+) create mode 100644 modules/picard/sortsam/functions.nf create mode 100644 modules/picard/sortsam/main.nf create mode 100644 modules/picard/sortsam/meta.yml create mode 100644 tests/modules/picard/sortsam/main.nf create mode 100644 tests/modules/picard/sortsam/test.yml diff --git a/modules/picard/sortsam/functions.nf b/modules/picard/sortsam/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/picard/sortsam/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths 
= path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf new file mode 100644 index 00000000..dc2a8136 --- /dev/null +++ b/modules/picard/sortsam/main.nf @@ -0,0 +1,49 @@ + +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PICARD_SORTSAM { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::picard=2.25.6" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/picard:2.25.6--hdfd78af_0" + } else { + container "quay.io/biocontainers/picard:2.25.6--hdfd78af_0" + } + + input: + tuple val(meta), path(bam) + val sort_order + + output: + tuple val(meta), path("*.sorted.bam"), emit: bam + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[Picard SortSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + picard \\ + SortSam \\ + -Xmx${avail_mem}g \\ + --INPUT $bam \\ + --OUTPUT ${prefix}.sorted.bam \\ + --SORT_ORDER $sort_order + + echo \$(picard SortSam --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + """ +} diff --git a/modules/picard/sortsam/meta.yml b/modules/picard/sortsam/meta.yml new file mode 100644 index 00000000..ea4b2c89 --- /dev/null +++ b/modules/picard/sortsam/meta.yml @@ -0,0 +1,47 @@ +name: picard_sortsam +description: Sorts BAM/SAM files based on a variety of picard specific criteria +keywords: + - sort + - bam + - sam +tools: + - picard: + description: | + A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) + data and formats such as SAM/BAM/CRAM and VCF. 
+ homepage: https://broadinstitute.github.io/picard/ + documentation: https://broadinstitute.github.io/picard/ + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,sam}" + - sort_order: + type: value + description: Picard sort order type + pattern: "unsorted|queryname|coordinate|duplicate|unknown" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bam: + type: file + description: Sorted BAM/CRAM/SAM file + pattern: "*sorted.{bam}" + + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d56a8695..2604d8e6 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -567,6 +567,10 @@ picard/mergesamfiles: - modules/picard/mergesamfiles/** - tests/modules/picard/mergesamfiles/** +picard/sortsam: + - modules/picard/sortsam/** + - tests/modules/picard/sortsam/** + plasmidid: - modules/plasmidid/** - tests/modules/plasmidid/** diff --git a/tests/modules/picard/sortsam/main.nf b/tests/modules/picard/sortsam/main.nf new file mode 100644 index 00000000..71ae75d6 --- /dev/null +++ b/tests/modules/picard/sortsam/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [:] ) + +workflow test_picard_sortsam { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) ] + sort_order = "queryname" + + PICARD_SORTSAM ( input, sort_order ) +} diff --git a/tests/modules/picard/sortsam/test.yml b/tests/modules/picard/sortsam/test.yml new file mode 100644 index 00000000..4443228e --- /dev/null +++ b/tests/modules/picard/sortsam/test.yml @@ -0,0 +1,8 @@ +- name: picard sortsam + command: nextflow run ./tests/modules/picard/sortsam -entry test_picard_sortsam -c tests/config/nextflow.config + tags: + - picard + - picard/sortsam + files: + - path: output/picard/test.sorted.bam + md5sum: b44a6ca04811a9470c7813c3c9465fd5 From f43778b0e641a9643ae73fe1f2fe2d5bb81edffc Mon Sep 17 00:00:00 2001 From: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> Date: Wed, 21 Jul 2021 14:30:52 +0200 Subject: [PATCH 019/314] Updated the version of STAR in align and genomegenerate modules (#604) * Updated the version of STAR in align and genomegenerate modules * Changes in test.yml * Changes in test.yml Co-authored-by: Harshil Patel --- modules/star/align/main.nf | 6 +- modules/star/genomegenerate/main.nf | 6 +- tests/modules/star/align/main.nf | 35 ++-- tests/modules/star/align/test.yml | 179 ++++++++++++++------- tests/modules/star/genomegenerate/test.yml | 62 +++---- 5 files changed, 184 insertions(+), 104 deletions(-) diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index e85ddb79..6e085f9b 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -12,11 +12,11 @@ process STAR_ALIGN { saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Note: 2.7X indices incompatible with AWS iGenomes. - conda (params.enable_conda ? 
'bioconda::star=2.6.1d' : null) + conda (params.enable_conda ? 'bioconda::star=2.7.9a' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/star:2.6.1d--0' + container 'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' } else { - container 'quay.io/biocontainers/star:2.6.1d--0' + container 'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' } input: diff --git a/modules/star/genomegenerate/main.nf b/modules/star/genomegenerate/main.nf index b440b415..9335b9b5 100644 --- a/modules/star/genomegenerate/main.nf +++ b/modules/star/genomegenerate/main.nf @@ -12,11 +12,11 @@ process STAR_GENOMEGENERATE { saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } // Note: 2.7X indices incompatible with AWS iGenomes. - conda (params.enable_conda ? "bioconda::star=2.6.1d bioconda::samtools=1.10 conda-forge::gawk=5.1.0" : null) + conda (params.enable_conda ? "bioconda::star=2.7.9a bioconda::samtools=1.13 conda-forge::gawk=5.1.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:59cdd445419f14abac76b31dd0d71217994cbcc9-0" + container "https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0" } else { - container "quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:59cdd445419f14abac76b31dd0d71217994cbcc9-0" + container "quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0" } input: diff --git a/tests/modules/star/align/main.nf b/tests/modules/star/align/main.nf index d280aeae..2a68d7cd 100644 --- a/tests/modules/star/align/main.nf +++ b/tests/modules/star/align/main.nf @@ -2,28 +2,43 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9'] ) -include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9'] ) +include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'] ) +include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'] ) + workflow test_star_alignment_single_end { input = [ [ id:'test', single_end:true ], // meta map - [ file("${launchDir}/tests/data/generic/fastq/test_single_end.fastq.gz", checkIfExists: true) ] + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true) ] ] - fasta = file("${launchDir}/tests/data/generic/fasta/GCF_000019425.1_ASM1942v1_genomic.fna", checkIfExists: true) - gtf = 
file("${launchDir}/tests/data/generic/gtf/GCF_000019425.1_ASM1942v1_genomic.gtf", checkIfExists: true) - + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + STAR_GENOMEGENERATE ( fasta, gtf ) STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) } workflow test_star_alignment_paired_end { input = [ [ id:'test', single_end:false ], // meta map - [ file("${launchDir}/tests/data/generic/fastq/test_R1.fastq.gz", checkIfExists: true), - file("${launchDir}/tests/data/generic/fastq/test_R2.fastq.gz", checkIfExists: true) ] + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] ] - fasta = file("${launchDir}/tests/data/generic/fasta/GCF_000019425.1_ASM1942v1_genomic.fna", checkIfExists: true) - gtf = file("${launchDir}/tests/data/generic/gtf/GCF_000019425.1_ASM1942v1_genomic.gtf", checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) STAR_GENOMEGENERATE ( fasta, gtf ) STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) } + + +workflow test_star_alignment_paired_end_for_fusion { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + STAR_GENOMEGENERATE ( fasta, gtf ) + STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf ) +} diff --git a/tests/modules/star/align/test.yml b/tests/modules/star/align/test.yml index d6bfb7fa..87413c2c 100644 --- a/tests/modules/star/align/test.yml +++ b/tests/modules/star/align/test.yml @@ -1,73 +1,132 @@ -- name: star align single-end - command: nextflow run ./tests/modules/star/align -entry test_star_alignment_single_end -c tests/config/nextflow.config +- name: star align test_star_alignment_single_end + command: nextflow run tests/modules/star/align -entry test_star_alignment_single_end -c tests/config/nextflow.config tags: - star - star/align files: - - path: output/star/star/Genome - md5sum: 323c992bac354f93073ce0fc43f222f8 - - path: output/star/star/SA - md5sum: 3e70e4fc6d031e1915bb510727f2c559 - - path: output/star/star/SAindex - md5sum: a94198b95a245d4f64af2a7133b6ec7b - - path: output/star/star/chrLength.txt - md5sum: f2bea3725fe1c01420c57fb73bdeb31a - - path: output/star/star/chrNameLength.txt - md5sum: c7ceb0a8827b2ea91c386933bee48742 - - path: output/star/star/chrStart.txt - md5sum: faf5c55020c99eceeef3e34188ac0d2f - - path: output/star/star/exonGeTrInfo.tab - md5sum: aec6e7a1ae3fc8c638ce5a9ce9c886b6 - - path: output/star/star/exonInfo.tab - md5sum: 42eca6ebc2dc72d9d6e6b3acd3714343 - - path: output/star/star/genomeParameters.txt - md5sum: 05e1041cbfb7f81686e17bc80b3ddcea - - path: output/star/star/sjdbInfo.txt - md5sum: 1082ab459363b3f2f7aabcef0979c1ed - - path: output/star/star/sjdbList.fromGTF.out.tab - md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/star/star/sjdbList.out.tab - md5sum: 
d41d8cd98f00b204e9800998ecf8427e - - path: output/star/star/transcriptInfo.tab - md5sum: 8fbe69abbbef4f89da3854873984dbac + - path: output/index/star/Genome + md5sum: a654229fbca6071dcb6b01ce7df704da + - path: output/index/star/Log.out + - path: output/index/star/SA + md5sum: 8c3edc46697b72c9e92440d4cf43506c + - path: output/index/star/SAindex + md5sum: 2a0c675d8b91d8e5e8c1826d3500482e + - path: output/index/star/chrLength.txt + md5sum: c81f40f27e72606d7d07097c1d56a5b5 + - path: output/index/star/chrName.txt + md5sum: 5ae68a67b70976ee95342a7451cb5af1 + - path: output/index/star/chrNameLength.txt + md5sum: b190587cae0531f3cf25552d8aa674db + - path: output/index/star/chrStart.txt + md5sum: 8d3291e6bcdbe9902fbd7c887494173f + - path: output/index/star/exonGeTrInfo.tab + md5sum: d04497f69d6ef889efd4d34fe63edcc4 + - path: output/index/star/exonInfo.tab + md5sum: 0d560290fab688b7268d88d5494bf9fe + - path: output/index/star/geneInfo.tab + md5sum: 8b608537307443ffaee4927d2b428805 + - path: output/index/star/genomeParameters.txt + md5sum: 3097677f4d8b2cb66770b9e55d343a7f + - path: output/index/star/sjdbInfo.txt + md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 + - path: output/index/star/sjdbList.fromGTF.out.tab + md5sum: 8760c33e966dad0b39f440301ebbdee4 + - path: output/index/star/sjdbList.out.tab + md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 + - path: output/index/star/transcriptInfo.tab + md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: b7f113f12ff62e09d16fa0ace290d03e + md5sum: 509d7f1fba3350913c8ea13f01917085 + - path: output/star/test.Log.final.out + - path: output/star/test.Log.out + - path: output/star/test.Log.progress.out - path: output/star/test.SJ.out.tab - md5sum: d41d8cd98f00b204e9800998ecf8427e -- name: star align paired-end - command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end -c tests/config/nextflow.config +- name: star align test_star_alignment_paired_end + command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end -c tests/config/nextflow.config tags: - star - star/align files: - - path: output/star/star/Genome - md5sum: 323c992bac354f93073ce0fc43f222f8 - - path: output/star/star/SA - md5sum: 3e70e4fc6d031e1915bb510727f2c559 - - path: output/star/star/SAindex - md5sum: a94198b95a245d4f64af2a7133b6ec7b - - path: output/star/star/chrLength.txt - md5sum: f2bea3725fe1c01420c57fb73bdeb31a - - path: output/star/star/chrNameLength.txt - md5sum: c7ceb0a8827b2ea91c386933bee48742 - - path: output/star/star/chrStart.txt - md5sum: faf5c55020c99eceeef3e34188ac0d2f - - path: output/star/star/exonGeTrInfo.tab - md5sum: aec6e7a1ae3fc8c638ce5a9ce9c886b6 - - path: output/star/star/exonInfo.tab - md5sum: 42eca6ebc2dc72d9d6e6b3acd3714343 - - path: output/star/star/genomeParameters.txt - md5sum: 05e1041cbfb7f81686e17bc80b3ddcea - - path: output/star/star/sjdbInfo.txt - md5sum: 1082ab459363b3f2f7aabcef0979c1ed - - path: output/star/star/sjdbList.fromGTF.out.tab - md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/star/star/sjdbList.out.tab - md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/star/star/transcriptInfo.tab - md5sum: 8fbe69abbbef4f89da3854873984dbac + - path: output/index/star/Genome + md5sum: a654229fbca6071dcb6b01ce7df704da + - path: output/index/star/Log.out + - path: output/index/star/SA + md5sum: 8c3edc46697b72c9e92440d4cf43506c + - path: output/index/star/SAindex + md5sum: 2a0c675d8b91d8e5e8c1826d3500482e + - path: output/index/star/chrLength.txt + md5sum: 
c81f40f27e72606d7d07097c1d56a5b5 + - path: output/index/star/chrName.txt + md5sum: 5ae68a67b70976ee95342a7451cb5af1 + - path: output/index/star/chrNameLength.txt + md5sum: b190587cae0531f3cf25552d8aa674db + - path: output/index/star/chrStart.txt + md5sum: 8d3291e6bcdbe9902fbd7c887494173f + - path: output/index/star/exonGeTrInfo.tab + md5sum: d04497f69d6ef889efd4d34fe63edcc4 + - path: output/index/star/exonInfo.tab + md5sum: 0d560290fab688b7268d88d5494bf9fe + - path: output/index/star/geneInfo.tab + md5sum: 8b608537307443ffaee4927d2b428805 + - path: output/index/star/genomeParameters.txt + md5sum: 3097677f4d8b2cb66770b9e55d343a7f + - path: output/index/star/sjdbInfo.txt + md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 + - path: output/index/star/sjdbList.fromGTF.out.tab + md5sum: 8760c33e966dad0b39f440301ebbdee4 + - path: output/index/star/sjdbList.out.tab + md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 + - path: output/index/star/transcriptInfo.tab + md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: a1f92e8dbeb954b6b8d3d7cc6b9814fb + md5sum: 64b408fb1d61e2de8ff51c847cd5bc52 + - path: output/star/test.Log.final.out + - path: output/star/test.Log.out + - path: output/star/test.Log.progress.out - path: output/star/test.SJ.out.tab - md5sum: d41d8cd98f00b204e9800998ecf8427e + +- name: star align test_star_alignment_paired_end_for_fusion + command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_fusion -c tests/config/nextflow.config + tags: + - star + - star/align + files: + - path: output/index/star/Genome + md5sum: a654229fbca6071dcb6b01ce7df704da + - path: output/index/star/Log.out + - path: output/index/star/SA + md5sum: 8c3edc46697b72c9e92440d4cf43506c + - path: output/index/star/SAindex + md5sum: 2a0c675d8b91d8e5e8c1826d3500482e + - path: output/index/star/chrLength.txt + md5sum: c81f40f27e72606d7d07097c1d56a5b5 + - path: output/index/star/chrName.txt + md5sum: 5ae68a67b70976ee95342a7451cb5af1 + - path: output/index/star/chrNameLength.txt + md5sum: b190587cae0531f3cf25552d8aa674db + - path: output/index/star/chrStart.txt + md5sum: 8d3291e6bcdbe9902fbd7c887494173f + - path: output/index/star/exonGeTrInfo.tab + md5sum: d04497f69d6ef889efd4d34fe63edcc4 + - path: output/index/star/exonInfo.tab + md5sum: 0d560290fab688b7268d88d5494bf9fe + - path: output/index/star/geneInfo.tab + md5sum: 8b608537307443ffaee4927d2b428805 + - path: output/index/star/genomeParameters.txt + md5sum: 3097677f4d8b2cb66770b9e55d343a7f + - path: output/index/star/sjdbInfo.txt + md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 + - path: output/index/star/sjdbList.fromGTF.out.tab + md5sum: 8760c33e966dad0b39f440301ebbdee4 + - path: output/index/star/sjdbList.out.tab + md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 + - path: output/index/star/transcriptInfo.tab + md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 + - path: output/star/test.Aligned.out.bam + md5sum: d724ca90a102347b9c5052a33ea4d308 + - path: output/star/test.Log.final.out + - path: output/star/test.Log.out + - path: output/star/test.Log.progress.out + - path: output/star/test.SJ.out.tab + md5sum: 5155c9fd1f787ad6d7d80987fb06219c diff --git a/tests/modules/star/genomegenerate/test.yml b/tests/modules/star/genomegenerate/test.yml index 4e9c2247..0a4bff80 100644 --- a/tests/modules/star/genomegenerate/test.yml +++ b/tests/modules/star/genomegenerate/test.yml @@ -1,31 +1,37 @@ -- name: star genomegenerate - command: nextflow run ./tests/modules/star/genomegenerate -entry test_star_genomegenerate -c 
tests/config/nextflow.config +- name: star genomegenerate test_star_genomegenerate + command: nextflow run tests/modules/star/genomegenerate -entry test_star_genomegenerate -c tests/config/nextflow.config tags: - - star - star/genomegenerate + - star files: - - path: ./output/index/star/Genome - md5sum: 323c992bac354f93073ce0fc43f222f8 - - path: ./output/index/star/SA - md5sum: 3e70e4fc6d031e1915bb510727f2c559 - - path: ./output/index/star/SAindex - md5sum: a94198b95a245d4f64af2a7133b6ec7b - - path: ./output/index/star/chrLength.txt - md5sum: f2bea3725fe1c01420c57fb73bdeb31a - - path: ./output/index/star/chrNameLength.txt - md5sum: c7ceb0a8827b2ea91c386933bee48742 - - path: ./output/index/star/chrStart.txt - md5sum: faf5c55020c99eceeef3e34188ac0d2f - - path: ./output/index/star/exonGeTrInfo.tab - md5sum: aec6e7a1ae3fc8c638ce5a9ce9c886b6 - - path: ./output/index/star/exonInfo.tab - md5sum: 42eca6ebc2dc72d9d6e6b3acd3714343 - - path: ./output/index/star/sjdbInfo.txt - md5sum: 1082ab459363b3f2f7aabcef0979c1ed - - path: ./output/index/star/sjdbList.fromGTF.out.tab - md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: ./output/index/star/sjdbList.out.tab - md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: ./output/index/star/transcriptInfo.tab - md5sum: 8fbe69abbbef4f89da3854873984dbac - - path: ./output/index/star/genomeParameters.txt + - path: output/index/star/Genome + md5sum: a654229fbca6071dcb6b01ce7df704da + - path: output/index/star/Log.out + - path: output/index/star/SA + md5sum: 8c3edc46697b72c9e92440d4cf43506c + - path: output/index/star/SAindex + md5sum: d0fbf2789ee1e9f60c352ba3655d9de4 + - path: output/index/star/chrLength.txt + md5sum: c81f40f27e72606d7d07097c1d56a5b5 + - path: output/index/star/chrName.txt + md5sum: 5ae68a67b70976ee95342a7451cb5af1 + - path: output/index/star/chrNameLength.txt + md5sum: b190587cae0531f3cf25552d8aa674db + - path: output/index/star/chrStart.txt + md5sum: 8d3291e6bcdbe9902fbd7c887494173f + - path: output/index/star/exonGeTrInfo.tab + md5sum: d04497f69d6ef889efd4d34fe63edcc4 + - path: output/index/star/exonInfo.tab + md5sum: 0d560290fab688b7268d88d5494bf9fe + - path: output/index/star/geneInfo.tab + md5sum: 8b608537307443ffaee4927d2b428805 + - path: output/index/star/genomeParameters.txt + md5sum: 5a1ec027e575c3d7c1851e6b80fb8c5d + - path: output/index/star/sjdbInfo.txt + md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 + - path: output/index/star/sjdbList.fromGTF.out.tab + md5sum: 8760c33e966dad0b39f440301ebbdee4 + - path: output/index/star/sjdbList.out.tab + md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 + - path: output/index/star/transcriptInfo.tab + md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 From a0019d41754f750a380084b2d0a015b88af02053 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Wed, 21 Jul 2021 15:34:21 +0200 Subject: [PATCH 020/314] fix: picard sortsam (#605) * Add picard/sortsam module * Fix container links * Changes after code review * Input meta in the right place * Correct output file suffix * Define only `bam` in output tuple --- modules/picard/sortsam/main.nf | 4 ++-- tests/modules/picard/sortsam/main.nf | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index dc2a8136..2af28496 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -24,7 +24,7 @@ process PICARD_SORTSAM { val sort_order output: - tuple val(meta), path("*.sorted.bam"), emit: bam + tuple val(meta), path("*.bam"), emit: bam path "*.version.txt" , emit: version script: @@ -41,7 +41,7 @@ process PICARD_SORTSAM { SortSam \\ -Xmx${avail_mem}g \\ --INPUT $bam \\ - --OUTPUT ${prefix}.sorted.bam \\ + --OUTPUT ${prefix}.bam \\ --SORT_ORDER $sort_order echo \$(picard SortSam --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt diff --git a/tests/modules/picard/sortsam/main.nf b/tests/modules/picard/sortsam/main.nf index 71ae75d6..0130fad6 100644 --- a/tests/modules/picard/sortsam/main.nf +++ b/tests/modules/picard/sortsam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [:] ) +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] ) workflow test_picard_sortsam { From 217303f5c1a92effb8a97c29294ee9f2e19f697e Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Wed, 21 Jul 2021 15:38:40 +0200 Subject: [PATCH 021/314] Add option -p to set the # of cpus on stringtie (#601) * Add option -p to set the # of cpus on stringtie * Bump version 2.1.7 to stringtie modules * Output stringtie/merge version * Fix padding * Apply suggestions from code review * Defining software variable * Fix test, gff can't be md5 check, contains instead Co-authored-by: Harshil Patel --- modules/stringtie/merge/main.nf | 10 +++++++--- modules/stringtie/stringtie/main.nf | 9 +++++---- tests/modules/stringtie/merge/test.yml | 10 ++++++++-- 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/modules/stringtie/merge/main.nf b/modules/stringtie/merge/main.nf index 3c88d494..f0820be1 100644 --- a/modules/stringtie/merge/main.nf +++ b/modules/stringtie/merge/main.nf @@ -11,11 +11,11 @@ process STRINGTIE_MERGE { saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } // Note: 2.7X indices incompatible with AWS iGenomes. - conda (params.enable_conda ? "bioconda::stringtie=2.1.4" : null) + conda (params.enable_conda ? 
"bioconda::stringtie=2.1.7" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/stringtie:2.1.4--h7e0af3c_0" + container "https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0" } else { - container "quay.io/biocontainers/stringtie:2.1.4--h7e0af3c_0" + container "quay.io/biocontainers/stringtie:2.1.7--h978d192_0" } input: @@ -24,12 +24,16 @@ process STRINGTIE_MERGE { output: path "stringtie.merged.gtf", emit: gtf + path "*.version.txt" , emit: version script: + def software = getSoftwareName(task.process) """ stringtie \\ --merge $stringtie_gtf \\ -G $annotation_gtf \\ -o stringtie.merged.gtf + + echo \$(stringtie --version 2>&1) > ${software}.version.txt """ } diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index eb751321..6cff993a 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -11,11 +11,11 @@ process STRINGTIE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::stringtie=2.1.4" : null) + conda (params.enable_conda ? "bioconda::stringtie=2.1.7" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/stringtie:2.1.4--h7e0af3c_0" + container "https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0" } else { - container "quay.io/biocontainers/stringtie:2.1.4--h7e0af3c_0" + container "quay.io/biocontainers/stringtie:2.1.7--h978d192_0" } input: @@ -48,8 +48,9 @@ process STRINGTIE { -A ${prefix}.gene.abundance.txt \\ -C ${prefix}.coverage.gtf \\ -b ${prefix}.ballgown \\ + -p $task.cpus \\ $options.args - stringtie --version > ${software}.version.txt + echo \$(stringtie --version 2>&1) > ${software}.version.txt """ } diff --git a/tests/modules/stringtie/merge/test.yml b/tests/modules/stringtie/merge/test.yml index 57488377..e49122be 100644 --- a/tests/modules/stringtie/merge/test.yml +++ b/tests/modules/stringtie/merge/test.yml @@ -5,7 +5,10 @@ - stringtie/merge files: - path: ./output/test_stringtie_forward_merge/stringtie.merged.gtf - md5sum: 676aa20a2d7a3db18136cdc7ba183099 + contains: + - 'stringtie' + - 'merge' + - 'chr22' - name: stringtie reverse-strand merge command: nextflow run ./tests/modules/stringtie/merge/ -entry test_stringtie_reverse_merge -c tests/config/nextflow.config @@ -14,4 +17,7 @@ - stringtie/merge files: - path: ./output/test_stringtie_reverse_merge/stringtie.merged.gtf - md5sum: 67e5102722ecaeea1fb44d1ec0953474 + contains: + - 'stringtie' + - 'merge' + - 'chr22' From 2d26b037a11190e65a060ac5c6809a458ff5536b Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Wed, 21 Jul 2021 15:51:33 +0200 Subject: [PATCH 022/314] (another) fix: picard sortsam (#606) * Add picard/sortsam module * Fix container links * Changes after code review * Input meta in the right place * Correct output file suffix * Define only `bam` in output tuple * Correct output meta Co-authored-by: Harshil Patel --- modules/picard/sortsam/meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/picard/sortsam/meta.yml b/modules/picard/sortsam/meta.yml index ea4b2c89..42de6eab 100644 --- a/modules/picard/sortsam/meta.yml +++ b/modules/picard/sortsam/meta.yml @@ -40,7 +40,7 @@ output: - bam: type: file description: Sorted BAM/CRAM/SAM file - pattern: "*sorted.{bam}" + pattern: "*.{bam}" authors: From 3cc43838e1e05f650845e7f668a18714d5b63efe Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 21 Jul 2021 16:22:19 +0200 Subject: [PATCH 023/314] module: picard filtersamreads (#602) * Start work filtersamreads * Refactored to allow optional input * Use proper readlist test data * Remove typo * Fix if else condition * Remove debugging code * Fix container URLs * Add required input specification meta * Cleanup * Apply suggestions from code review Co-authored-by: Harshil Patel * Fix suffixing * Additional formatting tweaks * Update modules/picard/filtersamreads/main.nf Co-authored-by: Harshil Patel * Update modules/picard/filtersamreads/meta.yml Co-authored-by: Harshil Patel --- modules/picard/filtersamreads/functions.nf | 68 ++++++++++++++++++++ modules/picard/filtersamreads/main.nf | 65 +++++++++++++++++++ modules/picard/filtersamreads/meta.yml | 51 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 4 +- tests/modules/picard/filtersamreads/main.nf | 27 ++++++++ tests/modules/picard/filtersamreads/test.yml | 18 ++++++ 7 files changed, 236 insertions(+), 1 deletion(-) create mode 100644 modules/picard/filtersamreads/functions.nf create mode 100644 modules/picard/filtersamreads/main.nf create mode 100644 modules/picard/filtersamreads/meta.yml create mode 100644 tests/modules/picard/filtersamreads/main.nf create mode 100644 tests/modules/picard/filtersamreads/test.yml diff --git a/modules/picard/filtersamreads/functions.nf b/modules/picard/filtersamreads/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/picard/filtersamreads/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results 
+// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf new file mode 100644 index 00000000..b7c00349 --- /dev/null +++ b/modules/picard/filtersamreads/main.nf @@ -0,0 +1,65 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PICARD_FILTERSAMREADS { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::picard=2.25.6" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/picard:2.25.6--hdfd78af_0" + } else { + container "quay.io/biocontainers/picard:2.25.6--hdfd78af_0" + } + + input: + tuple val(meta), path(bam) + val filter + path readlist + + output: + tuple val(meta), path("*.bam"), emit: bam + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[Picard FilterSamReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } + if ( filter == 'includeAligned' || filter == 'excludeAligned' ) { + """ + picard \\ + FilterSamReads \\ + -Xmx${avail_mem}g \\ + --INPUT $bam \\ + --OUTPUT ${prefix}.bam \\ + --FILTER $filter \\ + $options.args + + echo \$(picard FilterSamReads --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + """ + } else if ( filter == 'includeReadList' || filter == 'excludeReadList' ) { + """ + picard \\ + FilterSamReads \\ + -Xmx${avail_mem}g \\ + --INPUT $bam \\ + --OUTPUT ${prefix}.bam \\ + --FILTER $filter \\ + --READ_LIST_FILE $readlist \\ + $options.args + + echo \$(picard FilterSamReads --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + """ + } +} diff --git a/modules/picard/filtersamreads/meta.yml b/modules/picard/filtersamreads/meta.yml new file mode 100644 index 00000000..b5beba90 --- /dev/null +++ b/modules/picard/filtersamreads/meta.yml @@ -0,0 +1,51 @@ +name: picard_filtersamreads +description: Filters SAM/BAM files to include/exclude either aligned/unaligned reads or based on a read list +keywords: + - bam + - filter +tools: + - picard: + description: | + A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) + data and formats such as SAM/BAM/CRAM and VCF. + homepage: https://broadinstitute.github.io/picard/ + documentation: https://broadinstitute.github.io/picard/ + tool_dev_url: https://github.com/broadinstitute/picard + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: List of BAM files. If filtering without read list must be sorted by queryname with picard sortsam + pattern: "*.{bam}" + - filter: + type: value + description: Picard filter type + pattern: "includeAligned|excludeAligned|includeReadList|excludeReadList" + - readlist: + type: file + description: Optional text file containing reads IDs to include or exclude + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: Filtered BAM file + pattern: "*.{bam}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 2604d8e6..46dd9141 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -559,6 +559,10 @@ picard/collectwgsmetrics: - modules/picard/collectwgsmetrics/** - tests/modules/picard/collectwgsmetrics/** +picard/filtersamreads: + - modules/picard/filtersamreads/** + - tests/modules/picard/filtersamreads/** + picard/markduplicates: - modules/picard/markduplicates/** - tests/modules/picard/markduplicates/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 7b6e3cfd..3f86d7ba 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -74,12 +74,14 @@ params { scaffolds_fasta = "${test_data_dir}/genomics/sarscov2/illumina/fasta/scaffolds.fasta" assembly_gfa = "${test_data_dir}/genomics/sarscov2/illumina/gfa/assembly.gfa" + + test_single_end_bam_readlist_txt = "${test_data_dir}/genomics/sarscov2/illumina/picard/test.single_end.bam.readlist.txt" } 'nanopore' { test_sorted_bam = "${test_data_dir}/genomics/sarscov2/nanopore/bam/test.sorted.bam" test_sorted_bam_bai = "${test_data_dir}/genomics/sarscov2/nanopore/bam/test.sorted.bam.bai" - fast5_tar_gz = "${test_data_dir}/genomics/sarscov2/nanopore/fast5/fast5.tar.gz" + fast5_tar_gz = "${test_data_dir}/genomics/sarscov2/nanopore/fast5/fast5.tar.gz" test_fastq_gz = "${test_data_dir}/genomics/sarscov2/nanopore/fastq/test.fastq.gz" diff --git a/tests/modules/picard/filtersamreads/main.nf b/tests/modules/picard/filtersamreads/main.nf new file mode 100644 index 00000000..b09594cc --- /dev/null +++ b/tests/modules/picard/filtersamreads/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] ) +include { PICARD_FILTERSAMREADS } from '../../../../modules/picard/filtersamreads/main.nf' addParams( options: [suffix:'.filtered'] ) + +workflow test_picard_filtersamreads { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) ] + sort_order = 'queryname' + filter = 'includeAligned' + + PICARD_SORTSAM ( input, sort_order ) + PICARD_FILTERSAMREADS ( PICARD_SORTSAM.out.bam, filter, [] ) +} + +workflow test_picard_filtersamreads_readlist { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) ] + filter = 'includeReadList' + readlist = file(params.test_data['sarscov2']['illumina']['test_single_end_bam_readlist_txt'], checkIfExists: true) + + PICARD_FILTERSAMREADS ( input, filter, readlist ) +} diff --git a/tests/modules/picard/filtersamreads/test.yml b/tests/modules/picard/filtersamreads/test.yml new file mode 100644 index 00000000..34dd85c4 --- /dev/null +++ b/tests/modules/picard/filtersamreads/test.yml @@ -0,0 +1,18 @@ +- name: picard filtersamreads + command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads -c tests/config/nextflow.config + tags: + - picard + - picard/filtersamreads + files: + - path: output/picard/test.filtered.bam + md5sum: b44a6ca04811a9470c7813c3c9465fd5 + + +- name: picard 
filtersamreads_readlist + command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c tests/config/nextflow.config + tags: + - picard + - picard/filtersamreads + files: + - path: output/picard/test.filtered.bam + md5sum: 1e86b738b56f2c2b09f4cab52baf05c7 From 553f51d6bfdb376079d8ff913ee75ea809e081a3 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Thu, 22 Jul 2021 12:19:51 +0200 Subject: [PATCH 024/314] fix: picard filtersamreads input (#610) * Move readlist into same input channel as bam * Update test reflecting input restructuring * Update tests/modules/picard/filtersamreads/main.nf Co-authored-by: Harshil Patel * fix test Co-authored-by: Harshil Patel --- modules/picard/filtersamreads/main.nf | 3 +-- tests/modules/picard/filtersamreads/main.nf | 13 +++++++++---- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index b7c00349..c22bbaa3 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -19,9 +19,8 @@ process PICARD_FILTERSAMREADS { } input: - tuple val(meta), path(bam) + tuple val(meta), path(bam), path(readlist) val filter - path readlist output: tuple val(meta), path("*.bam"), emit: bam diff --git a/tests/modules/picard/filtersamreads/main.nf b/tests/modules/picard/filtersamreads/main.nf index b09594cc..a03471dd 100644 --- a/tests/modules/picard/filtersamreads/main.nf +++ b/tests/modules/picard/filtersamreads/main.nf @@ -13,15 +13,20 @@ workflow test_picard_filtersamreads { filter = 'includeAligned' PICARD_SORTSAM ( input, sort_order ) - PICARD_FILTERSAMREADS ( PICARD_SORTSAM.out.bam, filter, [] ) + PICARD_SORTSAM.out.bam + .map { + [ it[0], it[1], [] ] + } + .set{ ch_sorted_for_filtersamreads } + PICARD_FILTERSAMREADS ( ch_sorted_for_filtersamreads, filter ) } workflow test_picard_filtersamreads_readlist { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) ] + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_single_end_bam_readlist_txt'], checkIfExists: true) ] filter = 'includeReadList' - readlist = file(params.test_data['sarscov2']['illumina']['test_single_end_bam_readlist_txt'], checkIfExists: true) - PICARD_FILTERSAMREADS ( input, filter, readlist ) + PICARD_FILTERSAMREADS ( input, filter ) } From 3cabc95d0ed8a5a4e07b8f9b1d1f7ff9a70f61e1 Mon Sep 17 00:00:00 2001 From: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> Date: Thu, 22 Jul 2021 16:19:42 +0200 Subject: [PATCH 025/314] Added module arriba (#611) * Updated the version of STAR in align and genomegenerate modules * Changes in test.yml * Changes in test.yml * Added module arriba * Changes in test configs * Added module Arriba for fusion detection * Fixed review comments * Added an output option for discarded fusions * Resolved some conflits * conflicts * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/arriba/functions.nf | 68 ++++++++++++++++++++++++ modules/arriba/main.nf | 47 +++++++++++++++++ modules/arriba/meta.yml | 54 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/arriba/main.nf | 36 +++++++++++++ tests/modules/arriba/test.yml | 93 +++++++++++++++++++++++++++++++++ 6 files changed, 302 insertions(+) create mode 100644 modules/arriba/functions.nf create mode 100644 
modules/arriba/main.nf create mode 100644 modules/arriba/meta.yml create mode 100644 tests/modules/arriba/main.nf create mode 100644 tests/modules/arriba/test.yml diff --git a/modules/arriba/functions.nf b/modules/arriba/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/arriba/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/arriba/main.nf b/modules/arriba/main.nf new file mode 100644 index 00000000..739922ef --- /dev/null +++ b/modules/arriba/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ARRIBA { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::arriba=2.1.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/arriba:2.1.0--h3198e80_1" + } else { + container "quay.io/biocontainers/arriba:2.1.0--h3198e80_1" + } + + input: + tuple val(meta), path(bam) + path fasta + path gtf + + output: + tuple val(meta), path("*.fusions.tsv") , emit: fusions + tuple val(meta), path("*.fusions.discarded.tsv"), emit: fusions_fail + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def blacklist = (options.args.contains('-b')) ? '' : '-f blacklist' + """ + arriba \\ + -x $bam \\ + -a $fasta \\ + -g $gtf \\ + -o ${prefix}.fusions.tsv \\ + -O ${prefix}.fusions.discarded.tsv \\ + $blacklist \\ + $options.args + + echo \$(arriba -h | grep 'Version:' 2>&1) | sed 's/Version:\s//' > ${software}.version.txt + """ +} diff --git a/modules/arriba/meta.yml b/modules/arriba/meta.yml new file mode 100644 index 00000000..370f82ec --- /dev/null +++ b/modules/arriba/meta.yml @@ -0,0 +1,54 @@ +name: arriba +description: Arriba is a command-line tool for the detection of gene fusions from RNA-Seq data. +keywords: + - fusion + - arriba +tools: + - arriba: + description: Fast and accurate gene fusion detection from RNA-Seq data + homepage: https://github.com/suhrig/arriba + documentation: https://arriba.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/suhrig/arriba + doi: "10.1101/gr.257246.119" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - fasta: + type: file + description: Assembly FASTA file + pattern: "*.{fasta}" + - gtf: + type: file + description: Annotation GTF file + pattern: "*.{gtf}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - fusions: + type: file + description: File contains fusions which pass all of Arriba's filters. + pattern: "*.{fusions.tsv}" + - fusions_fail: + type: file + description: File contains fusions that Arriba classified as an artifact or that are also observed in healthy tissue. 
+ pattern: "*.{fusions.discarded.tsv}" + +authors: + - "@praveenraj2018" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 46dd9141..429b0ebd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -10,6 +10,10 @@ allelecounter: - modules/allelecounter/** - tests/modules/allelecounter/** +arriba: + - modules/arriba/** + - tests/modules/arriba/** + artic/guppyplex: - modules/artic/guppyplex/** - tests/modules/artic/guppyplex/** diff --git a/tests/modules/arriba/main.nf b/tests/modules/arriba/main.nf new file mode 100644 index 00000000..833742d6 --- /dev/null +++ b/tests/modules/arriba/main.nf @@ -0,0 +1,36 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { STAR_GENOMEGENERATE } from '../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 11'] ) +include { STAR_ALIGN } from '../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'] ) +include { ARRIBA } from '../../../modules/arriba/main.nf' addParams( options: [:] ) + +workflow test_arriba_single_end { + + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + STAR_GENOMEGENERATE ( fasta, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + ARRIBA ( STAR_ALIGN.out.bam, fasta, gtf ) +} + +workflow test_arriba_paired_end { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + STAR_GENOMEGENERATE ( fasta, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + ARRIBA ( STAR_ALIGN.out.bam, fasta, gtf ) +} diff --git a/tests/modules/arriba/test.yml b/tests/modules/arriba/test.yml new file mode 100644 index 00000000..c1dc7c1e --- /dev/null +++ b/tests/modules/arriba/test.yml @@ -0,0 +1,93 @@ +- name: arriba test_arriba_single_end + command: nextflow run tests/modules/arriba -entry test_arriba_single_end -c tests/config/nextflow.config + tags: + - arriba + files: + - path: output/arriba/test.fusions.discarded.tsv + md5sum: cad8c215b938d1e45b747a5b7898a4c2 + - path: output/arriba/test.fusions.tsv + md5sum: 7c3383f7eb6d79b84b0bd30a7ef02d70 + - path: output/index/star/Genome + md5sum: a654229fbca6071dcb6b01ce7df704da + - path: output/index/star/Log.out + - path: output/index/star/SA + md5sum: 
8c3edc46697b72c9e92440d4cf43506c + - path: output/index/star/SAindex + md5sum: 9f085c626553b1c52f2827421972ac10 + - path: output/index/star/chrLength.txt + md5sum: c81f40f27e72606d7d07097c1d56a5b5 + - path: output/index/star/chrName.txt + md5sum: 5ae68a67b70976ee95342a7451cb5af1 + - path: output/index/star/chrNameLength.txt + md5sum: b190587cae0531f3cf25552d8aa674db + - path: output/index/star/chrStart.txt + md5sum: 8d3291e6bcdbe9902fbd7c887494173f + - path: output/index/star/exonGeTrInfo.tab + md5sum: d04497f69d6ef889efd4d34fe63edcc4 + - path: output/index/star/exonInfo.tab + md5sum: 0d560290fab688b7268d88d5494bf9fe + - path: output/index/star/geneInfo.tab + md5sum: 8b608537307443ffaee4927d2b428805 + - path: output/index/star/genomeParameters.txt + md5sum: 9e42067b1ec70b773257529230dd7b3a + - path: output/index/star/sjdbInfo.txt + md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 + - path: output/index/star/sjdbList.fromGTF.out.tab + md5sum: 8760c33e966dad0b39f440301ebbdee4 + - path: output/index/star/sjdbList.out.tab + md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 + - path: output/index/star/transcriptInfo.tab + md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 + - path: output/star/test.Aligned.out.bam + md5sum: 29c99195dcc79ff4df1f754ff16aac78 + - path: output/star/test.Log.final.out + - path: output/star/test.Log.out + - path: output/star/test.Log.progress.out + - path: output/star/test.SJ.out.tab + +- name: arriba test_arriba_paired_end + command: nextflow run tests/modules/arriba -entry test_arriba_paired_end -c tests/config/nextflow.config + tags: + - arriba + files: + - path: output/arriba/test.fusions.discarded.tsv + md5sum: 85e36c887464e4deaa65f45174d3b8fd + - path: output/arriba/test.fusions.tsv + md5sum: 7c3383f7eb6d79b84b0bd30a7ef02d70 + - path: output/index/star/Genome + md5sum: a654229fbca6071dcb6b01ce7df704da + - path: output/index/star/Log.out + - path: output/index/star/SA + md5sum: 8c3edc46697b72c9e92440d4cf43506c + - path: output/index/star/SAindex + md5sum: 9f085c626553b1c52f2827421972ac10 + - path: output/index/star/chrLength.txt + md5sum: c81f40f27e72606d7d07097c1d56a5b5 + - path: output/index/star/chrName.txt + md5sum: 5ae68a67b70976ee95342a7451cb5af1 + - path: output/index/star/chrNameLength.txt + md5sum: b190587cae0531f3cf25552d8aa674db + - path: output/index/star/chrStart.txt + md5sum: 8d3291e6bcdbe9902fbd7c887494173f + - path: output/index/star/exonGeTrInfo.tab + md5sum: d04497f69d6ef889efd4d34fe63edcc4 + - path: output/index/star/exonInfo.tab + md5sum: 0d560290fab688b7268d88d5494bf9fe + - path: output/index/star/geneInfo.tab + md5sum: 8b608537307443ffaee4927d2b428805 + - path: output/index/star/genomeParameters.txt + md5sum: 9e42067b1ec70b773257529230dd7b3a + - path: output/index/star/sjdbInfo.txt + md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 + - path: output/index/star/sjdbList.fromGTF.out.tab + md5sum: 8760c33e966dad0b39f440301ebbdee4 + - path: output/index/star/sjdbList.out.tab + md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 + - path: output/index/star/transcriptInfo.tab + md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 + - path: output/star/test.Aligned.out.bam + md5sum: d724ca90a102347b9c5052a33ea4d308 + - path: output/star/test.Log.final.out + - path: output/star/test.Log.out + - path: output/star/test.Log.progress.out + - path: output/star/test.SJ.out.tab From 6f561b3b419ffd4b83434ffa8bae8eb387f15a4d Mon Sep 17 00:00:00 2001 From: Johnathan D <28043284+bjohnnyd@users.noreply.github.com> Date: Fri, 23 Jul 2021 10:44:00 +0100 Subject: [PATCH 026/314] enhance module fastp: add `save_merged` 
(#598) (#614) * enhance module fastp: add `save_merged` (#598) * removed md5sum checks from log and json --- modules/fastp/main.nf | 18 +++++++++++------- modules/fastp/meta.yml | 6 +++++- tests/modules/fastp/main.nf | 26 ++++++++++++++++++++++---- tests/modules/fastp/test.yml | 25 +++++++++++++++++++++++++ 4 files changed, 63 insertions(+), 12 deletions(-) diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index acba864a..652ffe80 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -21,14 +21,16 @@ process FASTP { input: tuple val(meta), path(reads) val save_trimmed_fail + val save_merged output: - tuple val(meta), path('*.trim.fastq.gz'), emit: reads - tuple val(meta), path('*.json') , emit: json - tuple val(meta), path('*.html') , emit: html - tuple val(meta), path('*.log') , emit: log - path '*.version.txt' , emit: version - tuple val(meta), path('*.fail.fastq.gz'), optional:true, emit: reads_fail + tuple val(meta), path('*.trim.fastq.gz') , emit: reads + tuple val(meta), path('*.json') , emit: json + tuple val(meta), path('*.html') , emit: html + tuple val(meta), path('*.log') , emit: log + path '*.version.txt' , emit: version + tuple val(meta), path('*.fail.fastq.gz') , optional:true, emit: reads_fail + tuple val(meta), path('*.merged.fastq.gz'), optional:true, emit: reads_merged script: // Added soft-links to original fastqs for consistent naming in MultiQC @@ -50,7 +52,8 @@ process FASTP { echo \$(fastp --version 2>&1) | sed -e "s/fastp //g" > ${software}.version.txt """ } else { - def fail_fastq = save_trimmed_fail ? "--unpaired1 ${prefix}_1.fail.fastq.gz --unpaired2 ${prefix}_2.fail.fastq.gz" : '' + def fail_fastq = save_trimmed_fail ? "--unpaired1 ${prefix}_1.fail.fastq.gz --unpaired2 ${prefix}_2.fail.fastq.gz" : '' + def merge_fastq = save_merged ? "-m --merged_out ${prefix}.merged.fastq.gz" : '' """ [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz @@ -62,6 +65,7 @@ process FASTP { --json ${prefix}.fastp.json \\ --html ${prefix}.fastp.html \\ $fail_fastq \\ + $merge_fastq \\ --thread $task.cpus \\ --detect_adapter_for_pe \\ $options.args \\ diff --git a/modules/fastp/meta.yml b/modules/fastp/meta.yml index 1fc3dfb6..d9130d6d 100644 --- a/modules/fastp/meta.yml +++ b/modules/fastp/meta.yml @@ -30,7 +30,7 @@ output: e.g. 
[ id:'test', single_end:false ] - reads: type: file - description: The trimmed/modified fastq reads + description: The trimmed/modified/unmerged fastq reads pattern: "*trim.fastq.gz" - json: type: file @@ -52,6 +52,10 @@ output: type: file description: Reads the failed the preprocessing pattern: "*fail.fastq.gz" + - reads_merged: + type: file + description: Reads that were successfully merged + pattern: "*.{merged.fastq.gz}" authors: - "@drpatelh" - "@kevinmenden" diff --git a/tests/modules/fastp/main.nf b/tests/modules/fastp/main.nf index f4129c09..c8e5112f 100644 --- a/tests/modules/fastp/main.nf +++ b/tests/modules/fastp/main.nf @@ -12,8 +12,9 @@ workflow test_fastp_single_end { [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] save_trimmed_fail = false + save_merged = false - FASTP ( input, save_trimmed_fail ) + FASTP ( input, save_trimmed_fail, save_merged ) } // @@ -25,8 +26,9 @@ workflow test_fastp_paired_end { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] save_trimmed_fail = false + save_merged = false - FASTP ( input, save_trimmed_fail ) + FASTP ( input, save_trimmed_fail, save_merged ) } // @@ -37,8 +39,9 @@ workflow test_fastp_single_end_trim_fail { [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] save_trimmed_fail = true + save_merged = false - FASTP ( input, save_trimmed_fail ) + FASTP ( input, save_trimmed_fail, save_merged ) } // @@ -50,6 +53,21 @@ workflow test_fastp_paired_end_trim_fail { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] save_trimmed_fail = true + save_merged = false - FASTP ( input, save_trimmed_fail ) + FASTP ( input, save_trimmed_fail, save_merged ) +} + +// +// Test with paired-end data with merging +// +workflow test_fastp_paired_end_merged { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + save_trimmed_fail = false + save_merged = true + + FASTP ( input, save_trimmed_fail, save_merged ) } diff --git a/tests/modules/fastp/test.yml b/tests/modules/fastp/test.yml index a6e253af..365ce025 100644 --- a/tests/modules/fastp/test.yml +++ b/tests/modules/fastp/test.yml @@ -81,3 +81,28 @@ md5sum: e62ff0123a74adfc6903d59a449cbdb0 - path: output/fastp/test_2.fail.fastq.gz md5sum: f52309b35a7c15cbd56a9c3906ef98a5 + +- name: fastp test_fastp_paired_end_merged + command: nextflow run tests/modules/fastp -entry test_fastp_paired_end_merged -c tests/config/nextflow.config + tags: + - fastp + files: + - path: output/fastp/test.fastp.html + contains: + - "
" + - path: output/fastp/test.fastp.json + contains: + - '"merged_and_filtered": {' + - '"total_reads": 75' + - '"total_bases": 13683' + - path: output/fastp/test.fastp.log + contains: + - "Merged and filtered:" + - "total reads: 75" + - "total bases: 13683" + - path: output/fastp/test.merged.fastq.gz + md5sum: ce88539076ced5aff11f866836ea1f40 + - path: output/fastp/test_1.trim.fastq.gz + md5sum: 65d75c13abbfbfd993914e1379634100 + - path: output/fastp/test_2.trim.fastq.gz + md5sum: 0d87ce4d8ef29fb35f337eb0f6c9fcb4 From a813e2e3a6b45585603c6f09d946d9bbbab914f6 Mon Sep 17 00:00:00 2001 From: Johnathan D <28043284+bjohnnyd@users.noreply.github.com> Date: Fri, 23 Jul 2021 22:24:19 +0100 Subject: [PATCH 027/314] Add bcftools reheader (#585) (#608) * local tests and linting passing (#585) * fix: picard filtersamreads input (#610) * Move readlist into same input channel as bam * Update test reflecting input restructuring * Update tests/modules/picard/filtersamreads/main.nf Co-authored-by: Harshil Patel * fix test Co-authored-by: Harshil Patel * Added module arriba (#611) * Updated the version of STAR in align and genomegenerate modules * Changes in test.yml * Changes in test.yml * Added module arriba * Changes in test configs * Added module Arriba for fusion detection * Fixed review comments * Added an output option for discarded fusions * Resolved some conflits * conflicts * Apply suggestions from code review Co-authored-by: Harshil Patel * added test for new header * enhance module fastp: add `save_merged` (#598) (#614) * enhance module fastp: add `save_merged` (#598) * removed md5sum checks from log and json * Apply suggestions from code review Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> --- modules/bcftools/reheader/functions.nf | 68 ++++++++++++++++++++++++ modules/bcftools/reheader/main.nf | 47 ++++++++++++++++ modules/bcftools/reheader/meta.yml | 51 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bcftools/reheader/main.nf | 40 ++++++++++++++ tests/modules/bcftools/reheader/test.yml | 26 +++++++++ 6 files changed, 236 insertions(+) create mode 100644 modules/bcftools/reheader/functions.nf create mode 100644 modules/bcftools/reheader/main.nf create mode 100644 modules/bcftools/reheader/meta.yml create mode 100644 tests/modules/bcftools/reheader/main.nf create mode 100644 tests/modules/bcftools/reheader/test.yml diff --git a/modules/bcftools/reheader/functions.nf b/modules/bcftools/reheader/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bcftools/reheader/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def 
paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bcftools/reheader/main.nf b/modules/bcftools/reheader/main.nf new file mode 100644 index 00000000..53b00411 --- /dev/null +++ b/modules/bcftools/reheader/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BCFTOOLS_REHEADER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" + } else { + container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" + } + + input: + tuple val(meta), path(vcf) + path fai + path header + + output: + tuple val(meta), path("*.vcf.gz"), emit: vcf + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def update_sequences = fai ? "-f $fai" : "" + def new_header = header ? "-h $header" : "" + """ + bcftools \\ + reheader \\ + $update_sequences \\ + $new_header \\ + $options.args \\ + --threads $task.cpus \\ + -o ${prefix}.vcf.gz \\ + $vcf + + echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + """ +} diff --git a/modules/bcftools/reheader/meta.yml b/modules/bcftools/reheader/meta.yml new file mode 100644 index 00000000..1b9c1a8b --- /dev/null +++ b/modules/bcftools/reheader/meta.yml @@ -0,0 +1,51 @@ +name: bcftools_reheader +description: Reheader a VCF file +keywords: + - reheader + - vcf + - update header +tools: + - reheader: + description: | + Modify header of VCF/BCF files, change sample names. 
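For context, a sketch of the command line this module assembles (file names, thread count and the '.updated' suffix are illustrative, mirroring the test configuration added further below; when the optional fai or header input is passed as an empty list `[]`, the corresponding flag is simply omitted, so the three test workflows exercise -f only, -h only, and both):

    bcftools reheader -f genome.fasta.fai --threads 2 -o test.updated.vcf.gz test.vcf.gz
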
+ homepage: http://samtools.github.io/bcftools/bcftools.html + documentation: http://samtools.github.io/bcftools/bcftools.html#reheader + doi: 10.1093/gigascience/giab008 + licence: ['GPL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: VCF/BCF file + pattern: "*.{vcf.gz,vcf,bcf}" + - fai: + type: file + description: Fasta index to update header sequences with + pattern: "*.{fai}" + - header: + type: file + description: New header to add to the VCF + pattern: "*.{header.txt}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - vcf: + type: file + description: VCF with updated header + pattern: "*.{vcf.gz}" + +authors: + - "@bjohnnyd" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 429b0ebd..69e39d91 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -50,6 +50,10 @@ bcftools/mpileup: - modules/bcftools/mpileup/** - tests/modules/bcftools/mpileup/** +bcftools/reheader: + - modules/bcftools/reheader/** + - tests/modules/bcftools/reheader/** + bcftools/stats: - modules/bcftools/stats/** - tests/modules/bcftools/stats/** diff --git a/tests/modules/bcftools/reheader/main.nf b/tests/modules/bcftools/reheader/main.nf new file mode 100644 index 00000000..40863331 --- /dev/null +++ b/tests/modules/bcftools/reheader/main.nf @@ -0,0 +1,40 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BCFTOOLS_REHEADER } from '../../../../modules/bcftools/reheader/main.nf' addParams( options: [suffix: '.updated'] ) + +workflow test_bcftools_reheader_update_sequences { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) + ] + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + header = [] + BCFTOOLS_REHEADER ( input, fai, header ) +} + +workflow test_bcftools_reheader_new_header { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) + ] + fai = [] + header = file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + + BCFTOOLS_REHEADER ( input, fai, header ) +} + +workflow test_bcftools_reheader_new_header_update_sequences { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) + ] + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + header = file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + + BCFTOOLS_REHEADER ( input, fai, header ) +} diff --git a/tests/modules/bcftools/reheader/test.yml b/tests/modules/bcftools/reheader/test.yml new file mode 100644 index 00000000..78337206 --- /dev/null +++ b/tests/modules/bcftools/reheader/test.yml @@ -0,0 +1,26 @@ +- name: bcftools reheader test_bcftools_reheader_update_sequences + command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_update_sequences -c tests/config/nextflow.config + tags: + - bcftools/reheader + - bcftools + files: + - path: output/bcftools/test.updated.vcf.gz + md5sum: 9e29f28038bfce77ee00022627209ed6 + +- name: bcftools reheader 
test_bcftools_reheader_new_header + command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header -c tests/config/nextflow.config + tags: + - bcftools/reheader + - bcftools + files: + - path: output/bcftools/test.updated.vcf.gz + md5sum: f7f536d889bbf5be40243252c394ee1f + +- name: bcftools reheader test_bcftools_reheader_new_header_update_sequences + command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header_update_sequences -c tests/config/nextflow.config + tags: + - bcftools/reheader + - bcftools + files: + - path: output/bcftools/test.updated.vcf.gz + md5sum: 9e29f28038bfce77ee00022627209ed6 From df909015b6651dadb52626cf471f15a6d33985a2 Mon Sep 17 00:00:00 2001 From: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> Date: Mon, 26 Jul 2021 19:03:30 +0200 Subject: [PATCH 028/314] Added an optional output junction channel in STAR (#621) * Added an optional output channel for chimeric junctions * Fix in test.yml * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/star/align/main.nf | 1 + modules/star/align/meta.yml | 5 ++++ tests/modules/star/align/main.nf | 20 ++++++++++--- tests/modules/star/align/test.yml | 47 ++++++++++++++++++++++++++++++- 4 files changed, 68 insertions(+), 5 deletions(-) diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index 6e085f9b..c06daf24 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -36,6 +36,7 @@ process STAR_ALIGN { tuple val(meta), path('*Aligned.unsort.out.bam') , optional:true, emit: bam_unsorted tuple val(meta), path('*fastq.gz') , optional:true, emit: fastq tuple val(meta), path('*.tab') , optional:true, emit: tab + tuple val(meta), path('*.out.junction') , optional:true, emit: junction script: def software = getSoftwareName(task.process) diff --git a/modules/star/align/meta.yml b/modules/star/align/meta.yml index 01bc2ecf..a589d145 100644 --- a/modules/star/align/meta.yml +++ b/modules/star/align/meta.yml @@ -69,7 +69,12 @@ output: type: file description: STAR output tab file(s) (optional) pattern: "*.tab" + - junction: + type: file + description: STAR chimeric junction output file (optional) + pattern: "*.out.junction" authors: - "@kevinmenden" - "@drpatelh" + - "@praveenraj2018" diff --git a/tests/modules/star/align/main.nf b/tests/modules/star/align/main.nf index 2a68d7cd..ff278efd 100644 --- a/tests/modules/star/align/main.nf +++ b/tests/modules/star/align/main.nf @@ -2,10 +2,10 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9'] ) -include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'] ) -include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'] ) - +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9'] ) +include { STAR_ALIGN } from 
'../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'] ) +include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'] ) +include { STAR_ALIGN as STAR_FOR_STARFUSION } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outReadsUnmapped None --twopassMode Basic --outSAMstrandField intronMotif --outSAMunmapped Within --chimSegmentMin 12 --chimJunctionOverhangMin 8 --chimOutJunctionFormat 1 --alignSJDBoverhangMin 10 --alignMatesGapMax 100000 --alignIntronMax 100000 --alignSJstitchMismatchNmax 5 -1 5 5 --chimMultimapScoreRange 3 --chimScoreJunctionNonGTAG -4 --chimMultimapNmax 20 --chimNonchimScoreDropMin 10 --peOverlapNbasesMin 12 --peOverlapMMp 0.1 --alignInsertionFlush Right --alignSplicedMateMapLminOverLmate 0 --alignSplicedMateMapLmin 30'] ) workflow test_star_alignment_single_end { input = [ [ id:'test', single_end:true ], // meta map @@ -42,3 +42,15 @@ workflow test_star_alignment_paired_end_for_fusion { STAR_GENOMEGENERATE ( fasta, gtf ) STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf ) } + +workflow test_star_alignment_paired_end_for_starfusion { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + STAR_GENOMEGENERATE ( fasta, gtf ) + STAR_FOR_STARFUSION ( input, STAR_GENOMEGENERATE.out.index, gtf ) +} diff --git a/tests/modules/star/align/test.yml b/tests/modules/star/align/test.yml index 87413c2c..1d3b548d 100644 --- a/tests/modules/star/align/test.yml +++ b/tests/modules/star/align/test.yml @@ -129,4 +129,49 @@ - path: output/star/test.Log.out - path: output/star/test.Log.progress.out - path: output/star/test.SJ.out.tab - md5sum: 5155c9fd1f787ad6d7d80987fb06219c + +- name: star align test_star_alignment_paired_end_for_starfusion + command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_starfusion -c tests/config/nextflow.config + tags: + - star + - star/align + files: + - path: output/index/star/Genome + md5sum: a654229fbca6071dcb6b01ce7df704da + - path: output/index/star/Log.out + - path: output/index/star/SA + md5sum: 8c3edc46697b72c9e92440d4cf43506c + - path: output/index/star/SAindex + md5sum: 2a0c675d8b91d8e5e8c1826d3500482e + - path: output/index/star/chrLength.txt + md5sum: c81f40f27e72606d7d07097c1d56a5b5 + - path: output/index/star/chrName.txt + md5sum: 5ae68a67b70976ee95342a7451cb5af1 + - path: output/index/star/chrNameLength.txt + md5sum: b190587cae0531f3cf25552d8aa674db + - path: output/index/star/chrStart.txt + md5sum: 8d3291e6bcdbe9902fbd7c887494173f + - path: output/index/star/exonGeTrInfo.tab + md5sum: d04497f69d6ef889efd4d34fe63edcc4 + 
- path: output/index/star/exonInfo.tab + md5sum: 0d560290fab688b7268d88d5494bf9fe + - path: output/index/star/geneInfo.tab + md5sum: 8b608537307443ffaee4927d2b428805 + - path: output/index/star/genomeParameters.txt + md5sum: 3097677f4d8b2cb66770b9e55d343a7f + - path: output/index/star/sjdbInfo.txt + md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 + - path: output/index/star/sjdbList.fromGTF.out.tab + md5sum: 8760c33e966dad0b39f440301ebbdee4 + - path: output/index/star/sjdbList.out.tab + md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 + - path: output/index/star/transcriptInfo.tab + md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 + - path: output/star/test.Aligned.out.bam + md5sum: a1bd1b40950a58ea2776908076160052 + - path: output/star/test.Chimeric.out.junction + md5sum: 327629eb54032212f29e1c32cbac6975 + - path: output/star/test.Log.final.out + - path: output/star/test.Log.out + - path: output/star/test.Log.progress.out + - path: output/star/test.SJ.out.tab From a21cc95c690e39874ded387cb4ebd160845614cb Mon Sep 17 00:00:00 2001 From: Johnathan D <28043284+bjohnnyd@users.noreply.github.com> Date: Mon, 26 Jul 2021 19:07:29 +0100 Subject: [PATCH 029/314] Add variantbam (#618) * template created for variantbam (#616) * Add bcftools reheader (#585) (#608) * local tests and linting passing (#585) * fix: picard filtersamreads input (#610) * Move readlist into same input channel as bam * Update test reflecting input restructuring * Update tests/modules/picard/filtersamreads/main.nf Co-authored-by: Harshil Patel * fix test Co-authored-by: Harshil Patel * Added module arriba (#611) * Updated the version of STAR in align and genomegenerate modules * Changes in test.yml * Changes in test.yml * Added module arriba * Changes in test configs * Added module Arriba for fusion detection * Fixed review comments * Added an output option for discarded fusions * Resolved some conflits * conflicts * Apply suggestions from code review Co-authored-by: Harshil Patel * added test for new header * enhance module fastp: add `save_merged` (#598) (#614) * enhance module fastp: add `save_merged` (#598) * removed md5sum checks from log and json * Apply suggestions from code review Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> * fixed autogenerated biocontainter links * variantbam module passing all tests/lints (#616) * Added an optional output junction channel in STAR (#621) * Added an optional output channel for chimeric junctions * Fix in test.yml * Apply suggestions from code review Co-authored-by: Harshil Patel * removed qcreport output fixes #616 Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> --- modules/variantbam/functions.nf | 68 +++++++++++++++++++++++++++++++ modules/variantbam/main.nf | 41 +++++++++++++++++++ modules/variantbam/meta.yml | 46 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/variantbam/main.nf | 13 ++++++ tests/modules/variantbam/test.yml | 7 ++++ 6 files changed, 179 insertions(+) create mode 100644 modules/variantbam/functions.nf create mode 100644 modules/variantbam/main.nf create mode 100644 modules/variantbam/meta.yml create mode 100644 tests/modules/variantbam/main.nf create mode 100644 tests/modules/variantbam/test.yml diff --git a/modules/variantbam/functions.nf b/modules/variantbam/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/variantbam/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/variantbam/main.nf b/modules/variantbam/main.nf new file mode 100644 index 00000000..dc29de58 --- /dev/null +++ b/modules/variantbam/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '1.4.4a' + +process VARIANTBAM { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::variantbam=1.4.4a" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/variantbam:1.4.4a--h7d7f7ad_5" + } else { + container "quay.io/biocontainers/variantbam:1.4.4a--h7d7f7ad_5" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam") , emit: bam + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + variant \\ + $bam \\ + -o ${prefix}.bam \\ + $options.args + + echo $VERSION > ${software}.version.txt + """ +} diff --git a/modules/variantbam/meta.yml b/modules/variantbam/meta.yml new file mode 100644 index 00000000..da0ff5e0 --- /dev/null +++ b/modules/variantbam/meta.yml @@ -0,0 +1,46 @@ +name: variantbam +description: Filtering, downsampling and profiling alignments in BAM/CRAM formats +keywords: + - filter + - bam + - subsample + - downsample + - downsample bam + - subsample bam +tools: + - variantbam: + description: Filtering and profiling of next-generational sequencing data using region-specific rules + homepage: https://github.com/walaj/VariantBam + documentation: https://github.com/walaj/VariantBam#table-of-contents + tool_dev_url: https://github.com/walaj/VariantBam + doi: 10.1093/bioinformatics/btw111 + licence: ['Apache2'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM file + pattern: "*.{bam,cram}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bam: + type: file + description: Filtered or downsampled BAM file + pattern: "*.{bam}" + +authors: + - "@bjohnnyd" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 69e39d91..30de48cb 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -831,6 +831,10 @@ untar: - modules/untar/** - tests/modules/untar/** +variantbam: + - modules/variantbam/** + - tests/modules/variantbam/** + vcftools: - modules/vcftools/** - tests/modules/vcftools/** diff --git a/tests/modules/variantbam/main.nf b/tests/modules/variantbam/main.nf new file mode 100644 index 00000000..3ea09197 --- /dev/null +++ b/tests/modules/variantbam/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { VARIANTBAM } from '../../../modules/variantbam/main.nf' addParams( options: [args: '-m 1'] ) + +workflow test_variantbam { + + input = [ [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + + VARIANTBAM ( input ) +} diff --git a/tests/modules/variantbam/test.yml b/tests/modules/variantbam/test.yml new file mode 100644 index 00000000..51b824cd --- /dev/null +++ b/tests/modules/variantbam/test.yml @@ -0,0 +1,7 @@ +- name: variantbam test_variantbam + command: nextflow run tests/modules/variantbam -entry test_variantbam -c tests/config/nextflow.config + tags: + - variantbam + files: + - path: output/variantbam/test.bam + md5sum: fc08f065475d60b3b06ee32920564d4b From 45dee96bdf53c4867ccce7c72c8b8a28977b9faf Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 27 Jul 2021 09:13:48 +0200 Subject: [PATCH 030/314] module: bwa/aln (#624) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Add bwa/aln module * Also output reads as required with SAI * fix container paths * Sync bwa version samese/sampe * Apply suggestions from code review Co-authored-by: Harshil Patel --- README.md | 16 +++++--- modules/bwa/aln/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/bwa/aln/main.nf | 67 ++++++++++++++++++++++++++++++++ modules/bwa/aln/meta.yml | 54 ++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bwa/aln/main.nf | 33 ++++++++++++++++ tests/modules/bwa/aln/test.yml | 39 +++++++++++++++++++ 7 files changed, 275 insertions(+), 6 deletions(-) create mode 100644 modules/bwa/aln/functions.nf create mode 100644 modules/bwa/aln/main.nf create mode 100644 modules/bwa/aln/meta.yml create mode 100644 tests/modules/bwa/aln/main.nf create mode 100644 tests/modules/bwa/aln/test.yml diff --git a/README.md b/README.md index 2e78d970..7239b24d 100644 --- a/README.md +++ b/README.md @@ -429,6 +429,16 @@ using a combination of `bwa` and `samtools` to output a BAM file instead of a SA - All function names MUST follow the `camelCase` convention. +#### Input/output options + +- Input channel declarations MUST be defined for all _possible_ input files (i.e. both required and optional files). + - Directly associated auxiliary files to an input file MAY be defined within the same input channel alongside the main input channel (e.g. [BAM and BAI](https://github.com/nf-core/modules/blob/e937c7950af70930d1f34bb961403d9d2aa81c7d/modules/samtools/flagstat/main.nf#L22)). 
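    For example, a minimal input declaration following this convention (hypothetical channel names, not taken from any specific module) keeps a BAM and its index together in one channel, so the index is always staged alongside the BAM it belongs to:

        input:
        tuple val(meta), path(bam), path(bai)
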
+ - Other generic auxiliary files used across different input files (e.g. common reference sequences) MAY be defined using a dedicated input channel (e.g. [reference files](https://github.com/nf-core/modules/blob/3cabc95d0ed8a5a4e07b8f9b1d1f7ff9a70f61e1/modules/bwa/mem/main.nf#L21-L23)). + +- Named file extensions MUST be emitted for ALL output channels e.g. `path "*.txt", emit: txt`. + +- Optional inputs are not currently supported by Nextflow. However, passing an empty list (`[]`) instead of a file as a module parameter can be used to work around this issue. + #### Module parameters - A module file SHOULD only define input and output files as command-line parameters to be executed within the process. @@ -439,12 +449,6 @@ using a combination of `bwa` and `samtools` to output a BAM file instead of a SA - Any parameters that need to be evaluated in the context of a particular sample e.g. single-end/paired-end data MUST also be defined within the process. -#### Input/output options - -- Named file extensions MUST be emitted for ALL output channels e.g. `path "*.txt", emit: txt`. - -- Optional inputs are not currently supported by Nextflow. However, passing an empty list (`[]`) instead of a file as a module parameter can be used to work around this issue. - #### Resource requirements - An appropriate resource `label` MUST be provided for the module as listed in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29-L46) e.g. `process_low`, `process_medium` or `process_high`. diff --git a/modules/bwa/aln/functions.nf b/modules/bwa/aln/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bwa/aln/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf new file mode 100644 index 00000000..d9c2ba13 --- /dev/null +++ b/modules/bwa/aln/main.nf @@ -0,0 +1,67 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BWA_ALN { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bwa=0.7.17" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bwa:0.7.17--h5bf99c6_8" + } else { + container "quay.io/biocontainers/bwa:0.7.17--h5bf99c6_8" + } + + input: + tuple val(meta), path(reads) + path index + + output: + tuple val(meta), path(reads), path("*.sai"), emit: sai + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + if (meta.single_end) { + """ + INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` + + bwa aln \\ + $options.args \\ + -t $task.cpus \\ + -f ${prefix}.sai \\ + \$INDEX \\ + ${reads} + + echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + """ + } else { + """ + INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` + + bwa aln \\ + $options.args \\ + -t $task.cpus \\ + -f ${prefix}.1.sai \\ + \$INDEX \\ + ${reads[0]} + + bwa aln \\ + $options.args \\ + -t $task.cpus \\ + -f ${prefix}.2.sai \\ + \$INDEX \\ + ${reads[1]} + + echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + """ + } +} diff --git a/modules/bwa/aln/meta.yml b/modules/bwa/aln/meta.yml new file mode 100644 index 00000000..eac1f509 --- /dev/null +++ b/modules/bwa/aln/meta.yml @@ -0,0 +1,54 @@ +name: bwa_aln +description: Find SA coordinates of the input reads for bwa short-read mapping +keywords: + - bwa + - aln + - short-read + - align + - reference + - fasta + - map + - fastq +tools: + - bwa: + description: | + BWA is a software package for mapping DNA sequences against + a large reference genome, such as the human genome. + homepage: http://bio-bwa.sourceforge.net/ + documentation: http://bio-bwa.sourceforge.net/ + doi: "10.1093/bioinformatics/btp324" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - index: + type: file + description: BWA genome index files + pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - sai: + type: file + description: SA coordinate file + pattern: "*.sai" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 30de48cb..885978df 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -158,6 +158,10 @@ bowtie2/build: - modules/bowtie2/build/** - tests/modules/bowtie2/build_test/** +bwa/aln: + - modules/bwa/aln/** + - tests/modules/bwa/aln/** + bwa/index: - modules/bwa/index/** - tests/modules/bwa/index/** diff --git a/tests/modules/bwa/aln/main.nf b/tests/modules/bwa/aln/main.nf new file mode 100644 index 00000000..feb7473d --- /dev/null +++ b/tests/modules/bwa/aln/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) + +// +// Test with single-end data +// +workflow test_bwa_aln_single_end { + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_ALN ( input, BWA_INDEX.out.index ) +} + +// +// Test with paired-end data +// +workflow test_bwa_aln_paired_end { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_ALN ( input, BWA_INDEX.out.index ) +} diff --git a/tests/modules/bwa/aln/test.yml b/tests/modules/bwa/aln/test.yml new file mode 100644 index 00000000..08848143 --- /dev/null +++ b/tests/modules/bwa/aln/test.yml @@ -0,0 +1,39 @@ +- name: bwa aln single-end + command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_single_end -c tests/config/nextflow.config + tags: + - bwa + - bwa/aln + files: + - path: ./output/bwa/test.sai + md5sum: aaaf39b6814c96ca1a5eacc662adf926 + - path: ./output/index/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: ./output/index/bwa/genome.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/index/bwa/genome.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: ./output/index/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/index/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 + +- name: bwa aln paired-end + command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_paired_end -c tests/config/nextflow.config + tags: + - bwa + - bwa/aln + files: + - path: ./output/bwa/test.1.sai + md5sum: aaaf39b6814c96ca1a5eacc662adf926 + - path: ./output/bwa/test.2.sai + md5sum: b4f185d9b4cb256dd5c377070a536124 + - path: ./output/index/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: ./output/index/bwa/genome.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/index/bwa/genome.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: ./output/index/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/index/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 From 
c41c9487e9bcc4888103151df8d15ff229608fc6 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 27 Jul 2021 09:36:03 +0200 Subject: [PATCH 031/314] Specify more guidelines on input channels (#615) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence From e1951d54be3d7ca25999cf362096817a74f72277 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Tue, 27 Jul 2021 09:32:18 -0500 Subject: [PATCH 032/314] Update dsh-bio to 2.0.5 (#628) --- modules/dshbio/filterbed/main.nf | 6 +++--- modules/dshbio/filtergff3/main.nf | 6 +++--- modules/dshbio/splitbed/main.nf | 6 +++--- modules/dshbio/splitgff3/main.nf | 6 +++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 92aadc41..cc1daa7d 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -11,11 +11,11 @@ process DSHBIO_FILTERBED { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.4" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.4--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.4--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" } input: diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index bb3a4abd..596c6b8f 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -11,11 +11,11 @@ process DSHBIO_FILTERGFF3 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.4" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.4--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.4--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" } input: diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 233b5319..75307b14 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -11,11 +11,11 @@ process DSHBIO_SPLITBED { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.4" : null) + conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.5" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.4--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.4--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" } input: diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index 62f72241..fa434b75 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -11,11 +11,11 @@ process DSHBIO_SPLITGFF3 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.4" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.4--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.4--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" } input: From c5235a983d454787fa0c3247b02086969217163b Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Wed, 28 Jul 2021 09:10:44 +0100 Subject: [PATCH 033/314] nf-core modules bump-versions for all modules (#630) * Add blacklist of modules that shouldn't be updated to .nf-core.yml * nf-core modules bump-versions for all modules * Remove TODO statements identified by linting * Fix md5sums for failing tests * Fix more tests --- .nf-core.yml | 9 ++++ modules/allelecounter/main.nf | 6 +-- modules/bcftools/consensus/main.nf | 6 +-- modules/bcftools/filter/main.nf | 6 +-- modules/bcftools/isec/main.nf | 6 +-- modules/bcftools/merge/main.nf | 6 +-- modules/bcftools/mpileup/main.nf | 6 +-- modules/bcftools/stats/main.nf | 6 +-- modules/blast/blastn/main.nf | 6 +-- modules/blast/makeblastdb/main.nf | 6 +-- modules/bowtie2/build/main.nf | 6 +-- modules/cnvkit/main.nf | 6 +-- modules/cutadapt/main.nf | 6 +-- modules/deeptools/computematrix/main.nf | 6 +-- modules/deeptools/plotfingerprint/main.nf | 6 +-- modules/deeptools/plotheatmap/main.nf | 6 +-- modules/deeptools/plotprofile/main.nf | 6 +-- modules/gubbins/main.nf | 6 +-- modules/hisat2/build/main.nf | 6 +-- modules/hisat2/extractsplicesites/main.nf | 6 +-- modules/iqtree/main.nf | 6 +-- modules/kallistobustools/count/main.nf | 54 +++++++++---------- modules/kallistobustools/count/meta.yml | 26 +++------ modules/kallistobustools/ref/main.nf | 6 +-- modules/last/dotplot/main.nf | 6 +-- modules/last/lastal/main.nf | 6 +-- modules/last/lastdb/main.nf | 6 +-- modules/last/mafconvert/main.nf | 6 +-- modules/last/mafswap/main.nf | 6 +-- modules/last/postmask/main.nf | 6 +-- modules/last/split/main.nf | 6 +-- modules/last/train/main.nf | 6 +-- modules/metaphlan3/main.nf | 6 +-- modules/methyldackel/extract/main.nf | 6 +-- modules/methyldackel/mbias/main.nf | 6 +-- modules/minimap2/align/main.nf | 6 +-- modules/minimap2/index/main.nf | 6 +-- modules/mosdepth/main.nf | 6 +-- modules/multiqc/main.nf | 6 +-- modules/nanoplot/main.nf | 6 +-- modules/picard/collectmultiplemetrics/main.nf | 6 +-- modules/picard/collectwgsmetrics/main.nf | 6 +-- modules/picard/filtersamreads/main.nf | 6 +-- 
modules/picard/markduplicates/main.nf | 6 +-- modules/picard/mergesamfiles/main.nf | 6 +-- modules/picard/sortsam/main.nf | 6 +-- modules/prodigal/meta.yml | 10 +--- modules/raxmlng/main.nf | 6 +-- modules/salmon/index/main.nf | 6 +-- modules/salmon/quant/main.nf | 6 +-- modules/samtools/faidx/main.nf | 6 +-- modules/samtools/fastq/main.nf | 6 +-- modules/samtools/flagstat/main.nf | 6 +-- modules/samtools/idxstats/main.nf | 6 +-- modules/samtools/index/main.nf | 6 +-- modules/samtools/merge/main.nf | 6 +-- modules/samtools/mpileup/main.nf | 6 +-- modules/samtools/sort/main.nf | 6 +-- modules/samtools/stats/main.nf | 6 +-- modules/samtools/view/main.nf | 6 +-- modules/seqkit/split2/main.nf | 6 +-- modules/seqwish/induce/main.nf | 6 +-- modules/shovill/meta.yml | 8 ++- modules/spades/main.nf | 6 +-- modules/tabix/bgzip/main.nf | 6 +-- modules/tabix/bgziptabix/main.nf | 6 +-- modules/tabix/tabix/main.nf | 6 +-- modules/trimgalore/main.nf | 6 +-- tests/modules/bcftools/filter/test.yml | 2 +- tests/modules/bcftools/mpileup/test.yml | 6 +-- tests/modules/bcftools/stats/test.yml | 2 +- tests/modules/blast/blastn/test.yml | 4 +- tests/modules/blast/makeblastdb/test.yml | 4 +- .../deeptools/plotfingerprint/test.yml | 6 +-- tests/modules/gubbins/test.yml | 8 +-- tests/modules/hisat2/align/test.yml | 4 +- tests/modules/hisat2/build_test/test.yml | 2 +- tests/modules/kallistobustools/count/main.nf | 30 +++++------ tests/modules/kallistobustools/count/test.yml | 36 ++++++------- tests/modules/kallistobustools/ref/test.yml | 15 ------ tests/modules/last/lastal/test.yml | 4 +- tests/modules/last/lastdb/test.yml | 4 +- tests/modules/minimap2/align/test.yml | 4 +- tests/modules/picard/markduplicates/test.yml | 2 +- tests/modules/picard/mergesamfiles/test.yml | 2 +- tests/modules/salmon/index/test.yml | 2 +- tests/modules/salmon/quant/test.yml | 12 ++--- tests/modules/samtools/fastq/test.yml | 4 +- tests/modules/samtools/flagstat/test.yml | 2 +- tests/modules/samtools/mpileup/test.yml | 2 +- tests/modules/samtools/sort/test.yml | 2 +- tests/modules/samtools/stats/test.yml | 2 +- tests/modules/spades/test.yml | 10 ++-- tests/modules/tabix/bgzip/test.yml | 2 +- tests/modules/tabix/bgziptabix/test.yml | 4 +- tests/modules/tabix/tabix/test.yml | 6 +-- 96 files changed, 321 insertions(+), 347 deletions(-) create mode 100644 .nf-core.yml diff --git a/.nf-core.yml b/.nf-core.yml new file mode 100644 index 00000000..72971af8 --- /dev/null +++ b/.nf-core.yml @@ -0,0 +1,9 @@ +bump-versions: + rseqc/junctionannotation: False + rseqc/bamstat: False + rseqc/readduplication: False + rseqc/readdistribution: False + rseqc/junctionsaturation: False + rseqc/inferexperiment: False + rseqc/innerdistance: False + sortmerna: False diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index 8f090566..ad24b3c1 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -11,11 +11,11 @@ process ALLELECOUNTER { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::cancerit-allelecount=4.2.1" : null) + conda (params.enable_conda ? 
'bioconda::cancerit-allelecount=4.3.0' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cancerit-allelecount:4.2.1--h3ecb661_0" + container "https://depot.galaxyproject.org/singularity/cancerit-allelecount:4.3.0--h41abebc_0" } else { - container "quay.io/biocontainers/cancerit-allelecount:4.2.1--h3ecb661_0" + container "quay.io/biocontainers/cancerit-allelecount:4.3.0--h41abebc_0" } input: diff --git a/modules/bcftools/consensus/main.nf b/modules/bcftools/consensus/main.nf index 67321fc2..0403f050 100644 --- a/modules/bcftools/consensus/main.nf +++ b/modules/bcftools/consensus/main.nf @@ -11,11 +11,11 @@ process BCFTOOLS_CONSENSUS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::bcftools=1.11' : null) + conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0' + container 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' } else { - container 'quay.io/biocontainers/bcftools:1.11--h7c999a4_0' + container 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' } input: diff --git a/modules/bcftools/filter/main.nf b/modules/bcftools/filter/main.nf index d7ec0d2b..fbdac0de 100644 --- a/modules/bcftools/filter/main.nf +++ b/modules/bcftools/filter/main.nf @@ -11,11 +11,11 @@ process BCFTOOLS_FILTER { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::bcftools=1.11" : null) + conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" + container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" } else { - container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" + container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" } input: diff --git a/modules/bcftools/isec/main.nf b/modules/bcftools/isec/main.nf index b0bde522..28c6103e 100644 --- a/modules/bcftools/isec/main.nf +++ b/modules/bcftools/isec/main.nf @@ -11,11 +11,11 @@ process BCFTOOLS_ISEC { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::bcftools=1.11" : null) + conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" + container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" } else { - container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" + container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" } input: diff --git a/modules/bcftools/merge/main.nf b/modules/bcftools/merge/main.nf index 09bc2e7d..66c52281 100644 --- a/modules/bcftools/merge/main.nf +++ b/modules/bcftools/merge/main.nf @@ -11,11 +11,11 @@ process BCFTOOLS_MERGE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::bcftools=1.11" : null) + conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" + container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" } else { - container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" + container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" } input: diff --git a/modules/bcftools/mpileup/main.nf b/modules/bcftools/mpileup/main.nf index 287a0c9d..de9b951f 100644 --- a/modules/bcftools/mpileup/main.nf +++ b/modules/bcftools/mpileup/main.nf @@ -11,11 +11,11 @@ process BCFTOOLS_MPILEUP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::bcftools=1.11" : null) + conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" + container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" } else { - container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" + container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" } input: diff --git a/modules/bcftools/stats/main.nf b/modules/bcftools/stats/main.nf index 84e48c05..90be5d2b 100644 --- a/modules/bcftools/stats/main.nf +++ b/modules/bcftools/stats/main.nf @@ -11,11 +11,11 @@ process BCFTOOLS_STATS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::bcftools=1.11" : null) + conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" + container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" } else { - container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" + container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" } input: diff --git a/modules/blast/blastn/main.nf b/modules/blast/blastn/main.nf index 8d519613..87e012e2 100644 --- a/modules/blast/blastn/main.nf +++ b/modules/blast/blastn/main.nf @@ -11,11 +11,11 @@ process BLAST_BLASTN { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::blast=2.10.1' : null) + conda (params.enable_conda ? 'bioconda::blast=2.12.0' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/blast:2.10.1--pl526he19e7b1_3' + container 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' } else { - container 'quay.io/biocontainers/blast:2.10.1--pl526he19e7b1_3' + container 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' } input: diff --git a/modules/blast/makeblastdb/main.nf b/modules/blast/makeblastdb/main.nf index 3e3b74c2..c938e8f6 100644 --- a/modules/blast/makeblastdb/main.nf +++ b/modules/blast/makeblastdb/main.nf @@ -11,11 +11,11 @@ process BLAST_MAKEBLASTDB { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? 'bioconda::blast=2.10.1' : null) + conda (params.enable_conda ? 'bioconda::blast=2.12.0' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/blast:2.10.1--pl526he19e7b1_3' + container 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' } else { - container 'quay.io/biocontainers/blast:2.10.1--pl526he19e7b1_3' + container 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' } input: diff --git a/modules/bowtie2/build/main.nf b/modules/bowtie2/build/main.nf index 42ff1d20..442fed18 100644 --- a/modules/bowtie2/build/main.nf +++ b/modules/bowtie2/build/main.nf @@ -11,11 +11,11 @@ process BOWTIE2_BUILD { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? 'bioconda::bowtie2=2.4.2' : null) + conda (params.enable_conda ? 
'bioconda::bowtie2=2.4.4' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bowtie2:2.4.2--py38h1c8e9b9_1' + container 'https://depot.galaxyproject.org/singularity/bowtie2:2.4.4--py39hbb4e92a_0' } else { - container 'quay.io/biocontainers/bowtie2:2.4.2--py38h1c8e9b9_1' + container 'quay.io/biocontainers/bowtie2:2.4.4--py36hd4290be_0' } input: diff --git a/modules/cnvkit/main.nf b/modules/cnvkit/main.nf index dee6051d..4416919e 100755 --- a/modules/cnvkit/main.nf +++ b/modules/cnvkit/main.nf @@ -11,11 +11,11 @@ process CNVKIT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::cnvkit=0.9.8" : null) + conda (params.enable_conda ? 'bioconda::cnvkit=0.9.9' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cnvkit:0.9.8--py_0" + container "https://depot.galaxyproject.org/singularity/cnvkit:0.9.9--pyhdfd78af_0" } else { - container "quay.io/biocontainers/cnvkit:0.9.8--py_0" + container "quay.io/biocontainers/cnvkit:0.9.9--pyhdfd78af_0" } input: diff --git a/modules/cutadapt/main.nf b/modules/cutadapt/main.nf index c392367e..6dccc2bc 100644 --- a/modules/cutadapt/main.nf +++ b/modules/cutadapt/main.nf @@ -11,11 +11,11 @@ process CUTADAPT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::cutadapt=3.2' : null) + conda (params.enable_conda ? 'bioconda::cutadapt=3.4' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/cutadapt:3.2--py38h0213d0e_0' + container 'https://depot.galaxyproject.org/singularity/cutadapt:3.4--py39h38f01e4_1' } else { - container 'quay.io/biocontainers/cutadapt:3.2--py38h0213d0e_0' + container 'quay.io/biocontainers/cutadapt:3.4--py37h73a75cf_1' } input: diff --git a/modules/deeptools/computematrix/main.nf b/modules/deeptools/computematrix/main.nf index bee16d3c..739e7cc1 100644 --- a/modules/deeptools/computematrix/main.nf +++ b/modules/deeptools/computematrix/main.nf @@ -11,11 +11,11 @@ process DEEPTOOLS_COMPUTEMATRIX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::deeptools=3.5.0" : null) + conda (params.enable_conda ? 
'bioconda::deeptools=3.5.1' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.0--py_0" + container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" } else { - container "quay.io/biocontainers/deeptools:3.5.0--py_0" + container "quay.io/biocontainers/deeptools:3.5.1--py_0" } input: diff --git a/modules/deeptools/plotfingerprint/main.nf b/modules/deeptools/plotfingerprint/main.nf index 42d5e6a2..56ecb688 100644 --- a/modules/deeptools/plotfingerprint/main.nf +++ b/modules/deeptools/plotfingerprint/main.nf @@ -11,11 +11,11 @@ process DEEPTOOLS_PLOTFINGERPRINT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::deeptools=3.5.0" : null) + conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.0--py_0" + container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" } else { - container "quay.io/biocontainers/deeptools:3.5.0--py_0" + container "quay.io/biocontainers/deeptools:3.5.1--py_0" } input: diff --git a/modules/deeptools/plotheatmap/main.nf b/modules/deeptools/plotheatmap/main.nf index 552dc117..8e25d96f 100644 --- a/modules/deeptools/plotheatmap/main.nf +++ b/modules/deeptools/plotheatmap/main.nf @@ -11,11 +11,11 @@ process DEEPTOOLS_PLOTHEATMAP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::deeptools=3.5.0" : null) + conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.0--py_0" + container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" } else { - container "quay.io/biocontainers/deeptools:3.5.0--py_0" + container "quay.io/biocontainers/deeptools:3.5.1--py_0" } input: diff --git a/modules/deeptools/plotprofile/main.nf b/modules/deeptools/plotprofile/main.nf index 59bfacd3..95f65c84 100644 --- a/modules/deeptools/plotprofile/main.nf +++ b/modules/deeptools/plotprofile/main.nf @@ -11,11 +11,11 @@ process DEEPTOOLS_PLOTPROFILE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::deeptools=3.5.0" : null) + conda (params.enable_conda ? 
'bioconda::deeptools=3.5.1' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.0--py_0" + container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" } else { - container "quay.io/biocontainers/deeptools:3.5.0--py_0" + container "quay.io/biocontainers/deeptools:3.5.1--py_0" } input: diff --git a/modules/gubbins/main.nf b/modules/gubbins/main.nf index 9129d14b..7f0041c8 100644 --- a/modules/gubbins/main.nf +++ b/modules/gubbins/main.nf @@ -10,11 +10,11 @@ process GUBBINS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::gubbins=2.4.1" : null) + conda (params.enable_conda ? 'bioconda::gubbins=3.0.0' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gubbins:2.4.1--py38h197edbe_1" + container "https://depot.galaxyproject.org/singularity/gubbins:3.0.0--py39h5bf99c6_0" } else { - container "quay.io/biocontainers/gubbins:2.4.1--py38h197edbe_1" + container "quay.io/biocontainers/gubbins:3.0.0--py39h5bf99c6_0" } input: diff --git a/modules/hisat2/build/main.nf b/modules/hisat2/build/main.nf index ce8bfb26..3e74b1d4 100644 --- a/modules/hisat2/build/main.nf +++ b/modules/hisat2/build/main.nf @@ -14,11 +14,11 @@ process HISAT2_BUILD { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::hisat2=2.2.0" : null) + conda (params.enable_conda ? 'bioconda::hisat2=2.2.1' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hisat2:2.2.0--py37hfa133b6_4" + container "https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3" } else { - container "quay.io/biocontainers/hisat2:2.2.0--py37hfa133b6_4" + container "quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3" } input: diff --git a/modules/hisat2/extractsplicesites/main.nf b/modules/hisat2/extractsplicesites/main.nf index 57f4dedb..d97fdb89 100644 --- a/modules/hisat2/extractsplicesites/main.nf +++ b/modules/hisat2/extractsplicesites/main.nf @@ -13,11 +13,11 @@ process HISAT2_EXTRACTSPLICESITES { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::hisat2=2.2.0" : null) + conda (params.enable_conda ? 
'bioconda::hisat2=2.2.1' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hisat2:2.2.0--py37hfa133b6_4" + container "https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3" } else { - container "quay.io/biocontainers/hisat2:2.2.0--py37hfa133b6_4" + container "quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3" } input: diff --git a/modules/iqtree/main.nf b/modules/iqtree/main.nf index 1eeb3a2a..3bd0f3b1 100644 --- a/modules/iqtree/main.nf +++ b/modules/iqtree/main.nf @@ -11,11 +11,11 @@ process IQTREE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::iqtree=2.1.2" : null) + conda (params.enable_conda ? 'bioconda::iqtree=2.1.4_beta' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/iqtree:2.1.2--h56fc30b_0" + container "https://depot.galaxyproject.org/singularity/iqtree:2.1.4_beta--hdcc8f71_0" } else { - container "quay.io/biocontainers/iqtree:2.1.2--h56fc30b_0" + container "quay.io/biocontainers/iqtree:2.1.4_beta--hdcc8f71_0" } input: diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index 5b7416ea..86172fc3 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -11,47 +11,45 @@ process KALLISTOBUSTOOLS_COUNT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::kb-python=0.26.0" : null) + conda (params.enable_conda ? 'bioconda::kb-python=0.26.3' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kb-python:0.26.0--pyhdfd78af_0" + container "https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0" } else { - container "quay.io/biocontainers/kb-python:0.26.0--pyhdfd78af_0" + container "quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0" } input: - tuple val(meta), path(reads) - path index - path t2g - path t1c - path t2c - val use_t1c - val use_t2c - val workflow - val technology + tuple val(meta), path(reads) + path index + path t2g + path t1c + path t2c + val workflow + val technology output: - tuple val(meta), path ("*_kallistobustools_count") , emit: kallistobustools_count - path "*.version.txt" , emit: version + tuple val(meta), path ("*.count"), emit: count + path "*.version.txt" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def cdna = use_t1c ? "-c1 $t1c" : '' - def introns = use_t2c ? "-c2 $t2c" : '' + def cdna = t1c ? "-c1 $t1c" : '' + def introns = t2c ? 
"-c2 $t2c" : '' """ kb \\ - count \\ - -t $task.cpus \\ - -i $index \\ - -g $t2g \\ - $cdna \\ - $introns \\ - --workflow $workflow \\ - -x $technology \\ - $options.args \\ - -o ${prefix}_kallistobustools_count \\ - ${reads[0]} \\ - ${reads[1]} + count \\ + -t $task.cpus \\ + -i $index \\ + -g $t2g \\ + $cdna \\ + $introns \\ + --workflow $workflow \\ + -x $technology \\ + $options.args \\ + -o ${prefix}.count \\ + ${reads[0]} \\ + ${reads[1]} echo \$(kb 2>&1) | sed 's/^kb_python //; s/Usage.*\$//' > ${software}.version.txt """ diff --git a/modules/kallistobustools/count/meta.yml b/modules/kallistobustools/count/meta.yml index 9e6fa720..688dfdef 100644 --- a/modules/kallistobustools/count/meta.yml +++ b/modules/kallistobustools/count/meta.yml @@ -18,14 +18,11 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - fastq1: - type: file - description: Read 1 fastq file - pattern: "*.{fastq,fastq.gz}" - - fastq2: - type: file - description: Read 2 fastq file - pattern: "*.{fastq,fastq.gz}" + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. - index: type: file description: kb-ref index file (.idx) @@ -38,17 +35,11 @@ input: type: file description: kb ref's c1 spliced_t2c file pattern: "*.{cdna_t2c.txt}" - - use_t1c: - type: boolean - description: Whether to use the c1 txt file for RNA velocity and nucleus workflows - t2c: type: file description: kb ref's c2 unspliced_t2c file pattern: "*.{introns_t2c.txt}" - - use_t2c: - type: boolean - description: Whether to use the c2 txt file for RNA velocity and nucleus workflows - - kb_workflow: + - workflow: type: value description: String value defining worfklow to use, can be one of "standard", "lamanno", "nucleus" pattern: "{standard,lamanno,nucleus,kite}" @@ -57,17 +48,16 @@ input: description: String value defining the sequencing technology used. pattern: "{10XV1,10XV2,10XV3,CELSEQ,CELSEQ2,DROPSEQ,INDROPSV1,INDROPSV2,INDROPSV3,SCRUBSEQ,SURECELL,SMARTSEQ}" - output: - meta: type: map description: | Groovy Map containing sample information e.g. [ id:'test'] - - kallistobustools_count: + - count: type: file description: kb count output folder - pattern: "*_{kallistobustools_count}" + pattern: "*.{count}" - version: type: file description: File containing software version diff --git a/modules/kallistobustools/ref/main.nf b/modules/kallistobustools/ref/main.nf index 427251cc..ffcd643e 100644 --- a/modules/kallistobustools/ref/main.nf +++ b/modules/kallistobustools/ref/main.nf @@ -11,11 +11,11 @@ process KALLISTOBUSTOOLS_REF { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::kb-python=0.26.0" : null) + conda (params.enable_conda ? 
'bioconda::kb-python=0.26.3' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kb-python:0.26.0--pyhdfd78af_0" + container "https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0" } else { - container "quay.io/biocontainers/kb-python:0.26.0--pyhdfd78af_0" + container "quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0" } input: diff --git a/modules/last/dotplot/main.nf b/modules/last/dotplot/main.nf index 4771aa4c..3644a18e 100644 --- a/modules/last/dotplot/main.nf +++ b/modules/last/dotplot/main.nf @@ -11,11 +11,11 @@ process LAST_DOTPLOT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::last=1238" : null) + conda (params.enable_conda ? 'bioconda::last=1250' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1238--h2e03b76_0" + container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" } else { - container "quay.io/biocontainers/last:1238--h2e03b76_0" + container "quay.io/biocontainers/last:1250--h2e03b76_0" } input: diff --git a/modules/last/lastal/main.nf b/modules/last/lastal/main.nf index 4d1cb9c7..e42653cc 100644 --- a/modules/last/lastal/main.nf +++ b/modules/last/lastal/main.nf @@ -11,11 +11,11 @@ process LAST_LASTAL { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::last=1238" : null) + conda (params.enable_conda ? 'bioconda::last=1250' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1238--h2e03b76_0" + container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" } else { - container "quay.io/biocontainers/last:1238--h2e03b76_0" + container "quay.io/biocontainers/last:1250--h2e03b76_0" } input: diff --git a/modules/last/lastdb/main.nf b/modules/last/lastdb/main.nf index ca376f67..a8cd4921 100644 --- a/modules/last/lastdb/main.nf +++ b/modules/last/lastdb/main.nf @@ -11,11 +11,11 @@ process LAST_LASTDB { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::last=1238" : null) + conda (params.enable_conda ? 
'bioconda::last=1250' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1238--h2e03b76_0" + container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" } else { - container "quay.io/biocontainers/last:1238--h2e03b76_0" + container "quay.io/biocontainers/last:1250--h2e03b76_0" } input: diff --git a/modules/last/mafconvert/main.nf b/modules/last/mafconvert/main.nf index 89c7d818..eea53dd1 100644 --- a/modules/last/mafconvert/main.nf +++ b/modules/last/mafconvert/main.nf @@ -11,11 +11,11 @@ process LAST_MAFCONVERT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::last=1238" : null) + conda (params.enable_conda ? 'bioconda::last=1250' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1238--h2e03b76_0" + container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" } else { - container "quay.io/biocontainers/last:1238--h2e03b76_0" + container "quay.io/biocontainers/last:1250--h2e03b76_0" } input: diff --git a/modules/last/mafswap/main.nf b/modules/last/mafswap/main.nf index e0a697f4..03292c81 100644 --- a/modules/last/mafswap/main.nf +++ b/modules/last/mafswap/main.nf @@ -11,11 +11,11 @@ process LAST_MAFSWAP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::last=1238" : null) + conda (params.enable_conda ? 'bioconda::last=1250' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1238--h2e03b76_0" + container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" } else { - container "quay.io/biocontainers/last:1238--h2e03b76_0" + container "quay.io/biocontainers/last:1250--h2e03b76_0" } input: diff --git a/modules/last/postmask/main.nf b/modules/last/postmask/main.nf index 72584b3e..677b23f6 100644 --- a/modules/last/postmask/main.nf +++ b/modules/last/postmask/main.nf @@ -11,11 +11,11 @@ process LAST_POSTMASK { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::last=1238" : null) + conda (params.enable_conda ? 
'bioconda::last=1250' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1238--h2e03b76_0" + container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" } else { - container "quay.io/biocontainers/last:1238--h2e03b76_0" + container "quay.io/biocontainers/last:1250--h2e03b76_0" } input: diff --git a/modules/last/split/main.nf b/modules/last/split/main.nf index 021b1bbf..a6fe1dda 100644 --- a/modules/last/split/main.nf +++ b/modules/last/split/main.nf @@ -11,11 +11,11 @@ process LAST_SPLIT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::last=1238" : null) + conda (params.enable_conda ? 'bioconda::last=1250' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1238--h2e03b76_0" + container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" } else { - container "quay.io/biocontainers/last:1238--h2e03b76_0" + container "quay.io/biocontainers/last:1250--h2e03b76_0" } input: diff --git a/modules/last/train/main.nf b/modules/last/train/main.nf index d6fd4007..cc1fa544 100644 --- a/modules/last/train/main.nf +++ b/modules/last/train/main.nf @@ -11,11 +11,11 @@ process LAST_TRAIN { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::last=1238" : null) + conda (params.enable_conda ? 'bioconda::last=1250' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1238--h2e03b76_0" + container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" } else { - container "quay.io/biocontainers/last:1238--h2e03b76_0" + container "quay.io/biocontainers/last:1250--h2e03b76_0" } input: diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index 83bb9883..6c75c2a0 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -11,11 +11,11 @@ process METAPHLAN3 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::metaphlan=3.0.10" : null) + conda (params.enable_conda ? 
'bioconda::metaphlan=3.0.12' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/metaphlan:3.0.10--pyhb7b1952_0" + container "https://depot.galaxyproject.org/singularity/metaphlan:3.0.12--pyhb7b1952_0" } else { - container "quay.io/biocontainers/metaphlan:3.0.10--pyhb7b1952_0" + container "quay.io/biocontainers/metaphlan:3.0.12--pyhb7b1952_0" } input: diff --git a/modules/methyldackel/extract/main.nf b/modules/methyldackel/extract/main.nf index d0a0b58d..4c7da3f4 100644 --- a/modules/methyldackel/extract/main.nf +++ b/modules/methyldackel/extract/main.nf @@ -11,11 +11,11 @@ process METHYLDACKEL_EXTRACT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::methyldackel=0.5.2" : null) + conda (params.enable_conda ? 'bioconda::methyldackel=0.6.0' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/methyldackel:0.5.2--h7435645_0" + container "https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0" } else { - container "quay.io/biocontainers/methyldackel:0.5.2--h7435645_0" + container "quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0" } input: diff --git a/modules/methyldackel/mbias/main.nf b/modules/methyldackel/mbias/main.nf index dcff677e..7c18197f 100644 --- a/modules/methyldackel/mbias/main.nf +++ b/modules/methyldackel/mbias/main.nf @@ -11,11 +11,11 @@ process METHYLDACKEL_MBIAS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::methyldackel=0.5.2" : null) + conda (params.enable_conda ? 'bioconda::methyldackel=0.6.0' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/methyldackel:0.5.2--h7435645_0" + container "https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0" } else { - container "quay.io/biocontainers/methyldackel:0.5.2--h7435645_0" + container "quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0" } input: diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index 71b745a3..ec5f6a07 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -11,11 +11,11 @@ process MINIMAP2_ALIGN { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::minimap2=2.17" : null) + conda (params.enable_conda ? 
'bioconda::minimap2=2.21' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minimap2:2.17--hed695b0_3" + container "https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0" } else { - container "quay.io/biocontainers/minimap2:2.17--hed695b0_3" + container "quay.io/biocontainers/minimap2:2.21--h5bf99c6_0" } input: diff --git a/modules/minimap2/index/main.nf b/modules/minimap2/index/main.nf index ee0c1b36..e143bd62 100644 --- a/modules/minimap2/index/main.nf +++ b/modules/minimap2/index/main.nf @@ -10,11 +10,11 @@ process MINIMAP2_INDEX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:['']) } - conda (params.enable_conda ? "bioconda::minimap2=2.17" : null) + conda (params.enable_conda ? 'bioconda::minimap2=2.21' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minimap2:2.17--hed695b0_3" + container "https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0" } else { - container "quay.io/biocontainers/minimap2:2.17--hed695b0_3" + container "quay.io/biocontainers/minimap2:2.21--h5bf99c6_0" } input: diff --git a/modules/mosdepth/main.nf b/modules/mosdepth/main.nf index 618efd79..6beea37a 100644 --- a/modules/mosdepth/main.nf +++ b/modules/mosdepth/main.nf @@ -11,11 +11,11 @@ process MOSDEPTH { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::mosdepth=0.3.1' : null) + conda (params.enable_conda ? 'bioconda::mosdepth=0.3.2' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mosdepth:0.3.1--ha7ba039_0" + container "https://depot.galaxyproject.org/singularity/mosdepth:0.3.2--h01d7912_0" } else { - container "quay.io/biocontainers/mosdepth:0.3.1--ha7ba039_0" + container "quay.io/biocontainers/mosdepth:0.3.2--h01d7912_0" } input: diff --git a/modules/multiqc/main.nf b/modules/multiqc/main.nf index da780800..8b6d6f0c 100644 --- a/modules/multiqc/main.nf +++ b/modules/multiqc/main.nf @@ -10,11 +10,11 @@ process MULTIQC { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::multiqc=1.10.1" : null) + conda (params.enable_conda ? 
'bioconda::multiqc=1.11' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.10.1--py_0" + container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" } else { - container "quay.io/biocontainers/multiqc:1.10.1--py_0" + container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" } input: diff --git a/modules/nanoplot/main.nf b/modules/nanoplot/main.nf index af080dc8..f5fffe13 100644 --- a/modules/nanoplot/main.nf +++ b/modules/nanoplot/main.nf @@ -11,11 +11,11 @@ process NANOPLOT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::nanoplot=1.36.1" : null) + conda (params.enable_conda ? 'bioconda::nanoplot=1.38.0' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nanoplot:1.36.1--pyhdfd78af_0" + container "https://depot.galaxyproject.org/singularity/nanoplot:1.38.0--pyhdfd78af_0" } else { - container "quay.io/biocontainers/nanoplot:1.36.1--pyhdfd78af_0" + container "quay.io/biocontainers/nanoplot:1.38.0--pyhdfd78af_0" } input: diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index c0059a40..81547e84 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -11,11 +11,11 @@ process PICARD_COLLECTMULTIPLEMETRICS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::picard=2.23.9" : null) + conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.23.9--0" + container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" } else { - container "quay.io/biocontainers/picard:2.23.9--0" + container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" } input: diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index f1c69d28..2f01354c 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -11,11 +11,11 @@ process PICARD_COLLECTWGSMETRICS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::picard=2.25.0" : null) + conda (params.enable_conda ? 
'bioconda::picard=2.25.7' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.0--0" + container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" } else { - container "quay.io/biocontainers/picard:2.25.0--0" + container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" } input: diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index c22bbaa3..22b8c5a8 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -11,11 +11,11 @@ process PICARD_FILTERSAMREADS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::picard=2.25.6" : null) + conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.6--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" } else { - container "quay.io/biocontainers/picard:2.25.6--hdfd78af_0" + container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" } input: diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index d7647414..d20014bf 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -11,11 +11,11 @@ process PICARD_MARKDUPLICATES { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::picard=2.23.9" : null) + conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.23.9--0" + container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" } else { - container "quay.io/biocontainers/picard:2.23.9--0" + container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" } input: diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index abbfae8f..9fd28af6 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -11,11 +11,11 @@ process PICARD_MERGESAMFILES { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::picard=2.23.9" : null) + conda (params.enable_conda ? 
'bioconda::picard=2.25.7' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.23.9--0" + container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" } else { - container "quay.io/biocontainers/picard:2.23.9--0" + container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" } input: diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index 2af28496..bb815c8f 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -12,11 +12,11 @@ process PICARD_SORTSAM { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::picard=2.25.6" : null) + conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.6--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" } else { - container "quay.io/biocontainers/picard:2.25.6--hdfd78af_0" + container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" } input: diff --git a/modules/prodigal/meta.yml b/modules/prodigal/meta.yml index 10e0a3eb..f48fe96d 100644 --- a/modules/prodigal/meta.yml +++ b/modules/prodigal/meta.yml @@ -1,32 +1,27 @@ name: prodigal -## TODO nf-core: Add a description of the module and list keywords -description: write your description here +description: Prodigal (Prokaryotic Dynamic Programming Genefinding Algorithm) is a microbial (bacterial and archaeal) gene finding program keywords: - sort tools: - prodigal: - ## TODO nf-core: Add a description and other details for the software below description: Prodigal (Prokaryotic Dynamic Programming Genefinding Algorithm) is a microbial (bacterial and archaeal) gene finding program homepage: {} documentation: {} tool_dev_url: {} doi: "" - licence: ['GPL v3'] + licence: ["GPL v3"] -## TODO nf-core: Add a description of all of the variables used as input input: - meta: type: map description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - ## TODO nf-core: Delete / customise this example input - bam: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" -## TODO nf-core: Add a description of all of the variables used as output output: - meta: type: map @@ -37,7 +32,6 @@ output: type: file description: File containing software version pattern: "*.{version.txt}" - ## TODO nf-core: Delete / customise this example output - bam: type: file description: Sorted BAM/CRAM/SAM file diff --git a/modules/raxmlng/main.nf b/modules/raxmlng/main.nf index 02c01927..9f8597b5 100644 --- a/modules/raxmlng/main.nf +++ b/modules/raxmlng/main.nf @@ -10,11 +10,11 @@ process RAXMLNG { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::raxml-ng=1.0.2" : null) + conda (params.enable_conda ? 
'bioconda::raxml-ng=1.0.3' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/raxml-ng:1.0.2--h7447c1b_0" + container "https://depot.galaxyproject.org/singularity/raxml-ng:1.0.3--h32fcf60_0" } else { - container "quay.io/biocontainers/raxml-ng:1.0.2--h7447c1b_0" + container "quay.io/biocontainers/raxml-ng:1.0.3--h32fcf60_0" } input: diff --git a/modules/salmon/index/main.nf b/modules/salmon/index/main.nf index 17d5bc06..e72ff121 100644 --- a/modules/salmon/index/main.nf +++ b/modules/salmon/index/main.nf @@ -11,11 +11,11 @@ process SALMON_INDEX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::salmon=1.4.0" : null) + conda (params.enable_conda ? 'bioconda::salmon=1.5.2' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/salmon:1.4.0--hf69c8f4_0" + container "https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0" } else { - container "quay.io/biocontainers/salmon:1.4.0--hf69c8f4_0" + container "quay.io/biocontainers/salmon:1.5.2--h84f40af_0" } input: diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 093137de..1b9b5803 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -11,11 +11,11 @@ process SALMON_QUANT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::salmon=1.4.0" : null) + conda (params.enable_conda ? 'bioconda::salmon=1.5.2' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/salmon:1.4.0--hf69c8f4_0" + container "https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0" } else { - container "quay.io/biocontainers/salmon:1.4.0--hf69c8f4_0" + container "quay.io/biocontainers/salmon:1.5.2--h84f40af_0" } input: diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index 6c023f1c..a89ff2bb 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FAIDX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 48b3a43f..6bedbb4e 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FASTQ { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index a66ea56d..d4852c66 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FLAGSTAT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index ff3cd9a6..14a07cfb 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_IDXSTATS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index 778e9384..e1966fb3 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_INDEX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 23b31e2f..0182b9fd 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_MERGE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 8f2cebd1..f736e9c7 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_MPILEUP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index 240e8e9f..0a6b7048 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_SORT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index 6bb0a4c7..8c72d725 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_STATS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index ec6c747f..2ca57032 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_VIEW { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.12" : null) + conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.12--hd5e65b6_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" } else { - container "quay.io/biocontainers/samtools:1.12--hd5e65b6_0" + container "quay.io/biocontainers/samtools:1.13--h8c37831_0" } input: diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index 4c516c93..5eeb0ad0 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -12,12 +12,12 @@ process SEQKIT_SPLIT2 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::seqkit=0.16.0" : null) + conda (params.enable_conda ? 'bioconda::seqkit=0.16.1' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqkit:0.16.0--h9ee0642_0" + container "https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0" } else { - container "quay.io/biocontainers/seqkit:0.16.0--h9ee0642_0" + container "quay.io/biocontainers/seqkit:0.16.1--h9ee0642_0" } input: diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index 64ecd595..defd86e2 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -12,12 +12,12 @@ process SEQWISH_INDUCE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::seqwish=0.4.1" : null) + conda (params.enable_conda ? 'bioconda::seqwish=0.7.1' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqwish:0.4.1--h8b12597_0" + container "https://depot.galaxyproject.org/singularity/seqwish:0.7.1--h2e03b76_0" } else { - container "quay.io/biocontainers/seqwish:0.4.1--h8b12597_0" + container "quay.io/biocontainers/seqwish:0.7.1--h2e03b76_0" } input: diff --git a/modules/shovill/meta.yml b/modules/shovill/meta.yml index 0a8661b1..b8f24e34 100644 --- a/modules/shovill/meta.yml +++ b/modules/shovill/meta.yml @@ -1,5 +1,4 @@ name: shovill -## TODO nf-core: Add a description of the module and list keywords description: Assemble bacterial isolate genomes from Illumina paired-end reads keywords: - bacterial @@ -8,11 +7,10 @@ keywords: tools: - shovill: - ## TODO nf-core: Add a description and other details for the software below description: Microbial assembly pipeline for Illumina paired-end reads homepage: https://github.com/tseemann/shovill documentation: https://github.com/tseemann/shovill/blob/master/README.md - licence: ['GPL v2'] + licence: ["GPL v2"] input: - meta: @@ -21,8 +19,8 @@ input: Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - reads: - type: file - description: List of input paired-end FastQ files + type: file + description: List of input paired-end FastQ files output: - meta: diff --git a/modules/spades/main.nf b/modules/spades/main.nf index c6208053..e78500f2 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -11,11 +11,11 @@ process SPADES { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::spades=3.15.2" : null) + conda (params.enable_conda ? 'bioconda::spades=3.15.3' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/spades:3.15.2--h95f258a_1" + container "https://depot.galaxyproject.org/singularity/spades:3.15.3--h95f258a_0" } else { - container "quay.io/biocontainers/spades:3.15.2--h95f258a_1" + container "quay.io/biocontainers/spades:3.15.3--h95f258a_0" } input: diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index 56a351db..e9d2e96e 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -11,11 +11,11 @@ process TABIX_BGZIP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::tabix=0.2.6" : null) + conda (params.enable_conda ? 'bioconda::tabix=1.11' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:0.2.6--ha92aebf_0" + container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" } else { - container "quay.io/biocontainers/tabix:0.2.6--ha92aebf_0" + container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" } input: diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 866a8bf8..6cc3322f 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -11,11 +11,11 @@ process TABIX_BGZIPTABIX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::tabix=0.2.6" : null) + conda (params.enable_conda ? 'bioconda::tabix=1.11' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:0.2.6--ha92aebf_0" + container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" } else { - container "quay.io/biocontainers/tabix:0.2.6--ha92aebf_0" + container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" } input: diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index da23f535..df1e84ee 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -11,11 +11,11 @@ process TABIX_TABIX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::tabix=0.2.6" : null) + conda (params.enable_conda ? 
'bioconda::tabix=1.11' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:0.2.6--ha92aebf_0" + container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" } else { - container "quay.io/biocontainers/tabix:0.2.6--ha92aebf_0" + container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" } input: diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 44b36e71..3c16d66f 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -11,11 +11,11 @@ process TRIMGALORE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::trim-galore=0.6.6" : null) + conda (params.enable_conda ? 'bioconda::trim-galore=0.6.7' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/trim-galore:0.6.6--0" + container "https://depot.galaxyproject.org/singularity/trim-galore:0.6.7--hdfd78af_0" } else { - container "quay.io/biocontainers/trim-galore:0.6.6--0" + container "quay.io/biocontainers/trim-galore:0.6.7--hdfd78af_0" } input: diff --git a/tests/modules/bcftools/filter/test.yml b/tests/modules/bcftools/filter/test.yml index a998f441..0f8e48eb 100644 --- a/tests/modules/bcftools/filter/test.yml +++ b/tests/modules/bcftools/filter/test.yml @@ -5,4 +5,4 @@ - bcftools files: - path: output/bcftools/test.vcf.gz - md5sum: 9d491cfa84067450342ba8e66c75e5b8 + md5sum: fc178eb342a91dc0d1d568601ad8f8e2 diff --git a/tests/modules/bcftools/mpileup/test.yml b/tests/modules/bcftools/mpileup/test.yml index c0c8d6a6..71877e29 100644 --- a/tests/modules/bcftools/mpileup/test.yml +++ b/tests/modules/bcftools/mpileup/test.yml @@ -5,8 +5,8 @@ - bcftools files: - path: output/bcftools/test.bcftools_stats.txt - md5sum: 2d506e32837a53a01fea0fc90402632a + md5sum: 74863ef525eef8d87e3119146d281bcf - path: output/bcftools/test.vcf.gz.tbi - md5sum: 11d90b5b35e4adf6b44fc53bec93bed3 + md5sum: 0772419c5d819b4caa4aebfcad010c6e - path: output/bcftools/test.vcf.gz - md5sum: 2cf273a9fa3784383799b6b24df2f88c + md5sum: 9811674bb8da7ff30581319a910f2396 diff --git a/tests/modules/bcftools/stats/test.yml b/tests/modules/bcftools/stats/test.yml index f863bfca..cd25fe66 100644 --- a/tests/modules/bcftools/stats/test.yml +++ b/tests/modules/bcftools/stats/test.yml @@ -5,4 +5,4 @@ - bcftools/stats files: - path: output/bcftools/test.bcftools_stats.txt - md5sum: c4c5938add12a20050eec3782c8ad623 + md5sum: d3543531396cf7012f13ebdce639cbc8 diff --git a/tests/modules/blast/blastn/test.yml b/tests/modules/blast/blastn/test.yml index ebc03e45..98f76921 100644 --- a/tests/modules/blast/blastn/test.yml +++ b/tests/modules/blast/blastn/test.yml @@ -13,10 +13,10 @@ - path: ./output/blast/blast_db/genome.fasta.nhr md5sum: f4b4ddb034fd3dd7b25c89e9d50c004e - path: ./output/blast/blast_db/genome.fasta.ndb - md5sum: 45f2daf9769957ff80868dd3d80d30a3 + md5sum: 0d553c830656469211de113c5022f06d - path: ./output/blast/blast_db/genome.fasta.not md5sum: 1e53e9d08f1d23af0299cfa87478a7bb - path: ./output/blast/blast_db/genome.fasta.nto md5sum: 33cdeccccebe80329f1fdbee7f5874cb - path: ./output/blast/blast_db/genome.fasta.ntf - md5sum: 1f6027d443e67a98ad0edc2d39971b0c + md5sum: de1250813f0c7affc6d12dac9d0fb6bb diff --git 
a/tests/modules/blast/makeblastdb/test.yml b/tests/modules/blast/makeblastdb/test.yml index c060ba59..7df17968 100644 --- a/tests/modules/blast/makeblastdb/test.yml +++ b/tests/modules/blast/makeblastdb/test.yml @@ -12,10 +12,10 @@ - path: ./output/blast/blast_db/genome.fasta.nhr md5sum: f4b4ddb034fd3dd7b25c89e9d50c004e - path: ./output/blast/blast_db/genome.fasta.ndb - md5sum: 45f2daf9769957ff80868dd3d80d30a3 + md5sum: 0d553c830656469211de113c5022f06d - path: ./output/blast/blast_db/genome.fasta.not md5sum: 1e53e9d08f1d23af0299cfa87478a7bb - path: ./output/blast/blast_db/genome.fasta.nto md5sum: 33cdeccccebe80329f1fdbee7f5874cb - path: ./output/blast/blast_db/genome.fasta.ntf - md5sum: 1f6027d443e67a98ad0edc2d39971b0c + md5sum: de1250813f0c7affc6d12dac9d0fb6bb diff --git a/tests/modules/deeptools/plotfingerprint/test.yml b/tests/modules/deeptools/plotfingerprint/test.yml index bb96fbe2..b7803a6e 100644 --- a/tests/modules/deeptools/plotfingerprint/test.yml +++ b/tests/modules/deeptools/plotfingerprint/test.yml @@ -7,7 +7,7 @@ - path: output/deeptools/test.plotFingerprint.pdf - path: output/deeptools/test.plotFingerprint.qcmetrics.txt contains: - - "AUC" - - "0.24184576629880325" + - "AUC" + - "0.24184576629880325" - path: output/deeptools/test.plotFingerprint.raw.txt - md5sum: e2a9ff341a315f49e7c8387a3323bdfb + md5sum: aff8e53de0ddd893aa9d8f9d4ce7e291 diff --git a/tests/modules/gubbins/test.yml b/tests/modules/gubbins/test.yml index 31e426b1..7bc0216b 100644 --- a/tests/modules/gubbins/test.yml +++ b/tests/modules/gubbins/test.yml @@ -10,14 +10,14 @@ - path: output/gubbins/all_sites.recombination_predictions.gff md5sum: f95871e79968340cb82532e2c9b0c92b - path: output/gubbins/all_sites.branch_base_reconstruction.embl - md5sum: 02e6fb268f7422bfe34b27ecd3b2c245 + md5sum: 9e051646d630f528fff58f1f73286006 - path: output/gubbins/all_sites.summary_of_snp_distribution.vcf md5sum: 276e62e888ea811577c8ffb2da0b3aff - path: output/gubbins/all_sites.per_branch_statistics.csv - md5sum: 94b09b25d10504b56aa0307beae98a98 + md5sum: 25e4fdb6681c3709a9add1d5632bbf3e - path: output/gubbins/all_sites.filtered_polymorphic_sites.phylip md5sum: 0a77f397a7797c5c3386832745b0c97a - path: output/gubbins/all_sites.final_tree.tre - md5sum: e3c7ea18e2c5c49774c0e2ff78bd1818 + md5sum: 6cb251b58307aab11cb4b48792d6cda1 - path: output/gubbins/all_sites.node_labelled.final_tree.tre - md5sum: 7727b4c4111ebf49cc8a4f1fdd25092c + md5sum: e01f965a15924b4f97603b8011c8d3f7 diff --git a/tests/modules/hisat2/align/test.yml b/tests/modules/hisat2/align/test.yml index 9a1fb271..1c6c8ac2 100644 --- a/tests/modules/hisat2/align/test.yml +++ b/tests/modules/hisat2/align/test.yml @@ -14,7 +14,7 @@ - path: output/index/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - path: output/index/hisat2/genome.1.ht2 - md5sum: 3ea3dc41304941ad8d047e4d71b4899e + md5sum: 057cfa8a22b97ee9cff4c8d342498803 - path: output/index/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - path: output/index/hisat2/genome.6.ht2 @@ -42,7 +42,7 @@ - path: output/index/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - path: output/index/hisat2/genome.1.ht2 - md5sum: 3ea3dc41304941ad8d047e4d71b4899e + md5sum: 057cfa8a22b97ee9cff4c8d342498803 - path: output/index/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - path: output/index/hisat2/genome.6.ht2 diff --git a/tests/modules/hisat2/build_test/test.yml b/tests/modules/hisat2/build_test/test.yml index 2a8fe324..a8bb2390 100644 --- a/tests/modules/hisat2/build_test/test.yml 
+++ b/tests/modules/hisat2/build_test/test.yml @@ -11,7 +11,7 @@ - path: output/index/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - path: output/index/hisat2/genome.1.ht2 - md5sum: 3ea3dc41304941ad8d047e4d71b4899e + md5sum: 057cfa8a22b97ee9cff4c8d342498803 - path: output/index/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - path: output/index/hisat2/genome.6.ht2 diff --git a/tests/modules/kallistobustools/count/main.nf b/tests/modules/kallistobustools/count/main.nf index 051195af..4400976a 100644 --- a/tests/modules/kallistobustools/count/main.nf +++ b/tests/modules/kallistobustools/count/main.nf @@ -5,20 +5,20 @@ nextflow.enable.dsl = 2 include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' addParams( options: [args:"--cellranger"] ) workflow test_kallistobustools_count { - - input = [ [id:'test_standard'], // meta map - [file("https://github.com/nf-core/test-datasets/blob/modules/data/genomics/homo_sapiens/illumina/10xgenomics/test_1.fastq.gz?raw=true", checkIfExists: true), - file("https://github.com/nf-core/test-datasets/blob/modules/data/genomics/homo_sapiens/illumina/10xgenomics/test_2.fastq.gz?raw=true", checkIfExists: true)] - ] - - index = file("https://github.com/FloWuenne/test-datasets/blob/scrnaseq/reference/kallistobustools/kb_ref.idx?raw=true", checkIfExists: true) - t2g = file("https://raw.githubusercontent.com/FloWuenne/test-datasets/scrnaseq/reference/kallistobustools/t2g.txt", checkIfExists: true) - t1c = file('t1c_dummy') - t2c = file('t2c_dummy') - use_t1c = false - use_t2c = false - workflow = "standard" - technology = "10XV3" - KALLISTOBUSTOOLS_COUNT (input,index,t2g,t1c,t2c,use_t1c,use_t2c,workflow,technology) + input = [ + [id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_10x_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_10x_2_fastq_gz'], checkIfExists: true) + ] + ] + + index = file("https://github.com/FloWuenne/test-datasets/blob/scrnaseq/reference/kallistobustools/kb_ref.idx?raw=true", checkIfExists: true) + t2g = file("https://raw.githubusercontent.com/FloWuenne/test-datasets/scrnaseq/reference/kallistobustools/t2g.txt", checkIfExists: true) + t1c = [] + t2c = [] + workflow = "standard" + technology = "10XV3" + + KALLISTOBUSTOOLS_COUNT ( input, index, t2g, t1c, t2c, workflow, technology ) } diff --git a/tests/modules/kallistobustools/count/test.yml b/tests/modules/kallistobustools/count/test.yml index 8ff9a66f..766d5b57 100644 --- a/tests/modules/kallistobustools/count/test.yml +++ b/tests/modules/kallistobustools/count/test.yml @@ -4,33 +4,33 @@ - kallistobustools/count - kallistobustools files: - - path: output/kallistobustools/test_standard_kallistobustools_count/10xv3_whitelist.txt + - path: output/kallistobustools/test.count/10x_version3_whitelist.txt md5sum: 3d36d0a4021fd292b265e2b5e72aaaf3 - - path: output/kallistobustools/test_standard_kallistobustools_count/counts_unfiltered/cellranger/barcodes.tsv + - path: output/kallistobustools/test.count/counts_unfiltered/cellranger/barcodes.tsv md5sum: 8f734732c46f52c4d1c025bfe4134bd2 - - path: output/kallistobustools/test_standard_kallistobustools_count/counts_unfiltered/cellranger/genes.tsv + - path: output/kallistobustools/test.count/counts_unfiltered/cellranger/genes.tsv md5sum: fbebf995a3de568db8ac028cd0c5d993 - - path: output/kallistobustools/test_standard_kallistobustools_count/counts_unfiltered/cellranger/matrix.mtx + - path: 
output/kallistobustools/test.count/counts_unfiltered/cellranger/matrix.mtx md5sum: 4847bae27c41961496d504bcfe9890ba - - path: output/kallistobustools/test_standard_kallistobustools_count/counts_unfiltered/cells_x_genes.barcodes.txt + - path: output/kallistobustools/test.count/counts_unfiltered/cells_x_genes.barcodes.txt md5sum: cafdf96423987e3d9e807cdc16139541 - - path: output/kallistobustools/test_standard_kallistobustools_count/counts_unfiltered/cells_x_genes.genes.txt + - path: output/kallistobustools/test.count/counts_unfiltered/cells_x_genes.genes.txt md5sum: 52d0627aaf0418bebe3ef75ad77da53f - - path: output/kallistobustools/test_standard_kallistobustools_count/counts_unfiltered/cells_x_genes.mtx + - path: output/kallistobustools/test.count/counts_unfiltered/cells_x_genes.mtx md5sum: d05e1582385ba5f215fa73c470343c06 - - path: output/kallistobustools/test_standard_kallistobustools_count/inspect.json + - path: output/kallistobustools/test.count/inspect.json md5sum: d3d23063f3fc07f7fbd24748aa4271a9 - - path: output/kallistobustools/test_standard_kallistobustools_count/kb_info.json - contains: - - 'kallisto' - - path: output/kallistobustools/test_standard_kallistobustools_count/matrix.ec + - path: output/kallistobustools/test.count/kb_info.json + contains: + - "kallisto" + - path: output/kallistobustools/test.count/matrix.ec md5sum: cd8340e3fb78d74ad85fabdbe0a778f0 - - path: output/kallistobustools/test_standard_kallistobustools_count/output.bus + - path: output/kallistobustools/test.count/output.bus md5sum: f4702922bd0c142e34b3680c2251426a - - path: output/kallistobustools/test_standard_kallistobustools_count/output.unfiltered.bus + - path: output/kallistobustools/test.count/output.unfiltered.bus md5sum: e38f99748e598e33fe035b89e7c89fb5 - - path: output/kallistobustools/test_standard_kallistobustools_count/run_info.json - contains: - - 'n_targets' - - path: output/kallistobustools/test_standard_kallistobustools_count/transcripts.txt + - path: output/kallistobustools/test.count/run_info.json + contains: + - "n_targets" + - path: output/kallistobustools/test.count/transcripts.txt md5sum: 6d583083eaf6ca81e409332a40d2e74c diff --git a/tests/modules/kallistobustools/ref/test.yml b/tests/modules/kallistobustools/ref/test.yml index 1821533a..54954085 100644 --- a/tests/modules/kallistobustools/ref/test.yml +++ b/tests/modules/kallistobustools/ref/test.yml @@ -5,11 +5,8 @@ - kallistobustools files: - path: output/kallistobustools/cdna.fa - md5sum: 0de7fdac9e7418576987ed93640927c7 - path: output/kallistobustools/kb_ref_out.idx - md5sum: 7d5cd0731e7c467e5521d761f7d79895 - path: output/kallistobustools/t2g.txt - md5sum: d5bcfd61ff078aa92e576abf8cc4fc08 - name: kallistobustools ref test_kallistobustools_ref_lamanno command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_lamanno -c tests/config/nextflow.config @@ -18,17 +15,11 @@ - kallistobustools files: - path: output/kallistobustools/cdna.fa - md5sum: 0de7fdac9e7418576987ed93640927c7 - path: output/kallistobustools/cdna_t2c.txt - md5sum: 6d583083eaf6ca81e409332a40d2e74c - path: output/kallistobustools/intron.fa - md5sum: ca5ca147afa0a25f6a730edfb39a6098 - path: output/kallistobustools/intron_t2c.txt - md5sum: 728480007abfdbdb248f7ba9de6026ef - path: output/kallistobustools/kb_ref_out.idx - md5sum: 48ca8323aadd30b1762d4e8f5a7d7aee - path: output/kallistobustools/t2g.txt - md5sum: 7cae93dfb9a26f70adf4a57fe2a83027 - name: kallistobustools ref test_kallistobustools_ref_nucleus command: nextflow run 
tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_nucleus -c tests/config/nextflow.config @@ -37,14 +28,8 @@ - kallistobustools files: - path: output/kallistobustools/cdna.fa - md5sum: 0de7fdac9e7418576987ed93640927c7 - path: output/kallistobustools/cdna_t2c.txt - md5sum: 6d583083eaf6ca81e409332a40d2e74c - path: output/kallistobustools/intron.fa - md5sum: ca5ca147afa0a25f6a730edfb39a6098 - path: output/kallistobustools/intron_t2c.txt - md5sum: 728480007abfdbdb248f7ba9de6026ef - path: output/kallistobustools/kb_ref_out.idx - md5sum: 48ca8323aadd30b1762d4e8f5a7d7aee - path: output/kallistobustools/t2g.txt - md5sum: 7cae93dfb9a26f70adf4a57fe2a83027 diff --git a/tests/modules/last/lastal/test.yml b/tests/modules/last/lastal/test.yml index c6966a5f..48b0d223 100644 --- a/tests/modules/last/lastal/test.yml +++ b/tests/modules/last/lastal/test.yml @@ -5,7 +5,7 @@ - last/lastal files: - path: output/last/contigs.genome.maf.gz - md5sum: 2fc56553282d0826310bdef16a55e587 + md5sum: 670f4fa1a94b23690cdb6fc603813c75 - path: output/untar/lastdb/genome.bck md5sum: 5519879b9b6c4d1fc508da7f17f88f2e - path: output/untar/lastdb/genome.des @@ -28,7 +28,7 @@ - last/lastal files: - path: output/last/contigs.genome.maf.gz - md5sum: f50557bed5430b42de7b0d5d61075cf0 + md5sum: b0202b013e1caa9163516cd4ff4fbdbc - path: output/untar/lastdb/genome.bck md5sum: 5519879b9b6c4d1fc508da7f17f88f2e - path: output/untar/lastdb/genome.des diff --git a/tests/modules/last/lastdb/test.yml b/tests/modules/last/lastdb/test.yml index fed274eb..c69ecfac 100644 --- a/tests/modules/last/lastdb/test.yml +++ b/tests/modules/last/lastdb/test.yml @@ -9,7 +9,7 @@ - path: output/last/lastdb/test.des md5sum: 3a9ea6d336e113a74d7fdca5e7b623fc - path: output/last/lastdb/test.prj - md5sum: 2c981eb9b9d2012d8413946a5b378f20 + md5sum: 6948d17d2a10e470ea545f659930a543 - path: output/last/lastdb/test.sds md5sum: 2cd381f4f8a9c52cfcd323a2863eccb2 - path: output/last/lastdb/test.ssp @@ -30,7 +30,7 @@ - path: output/last/lastdb/test.des md5sum: 26ab49015cc572172b9efa50fc5190bc - path: output/last/lastdb/test.prj - md5sum: aec51a18da1c2361aaca70dd16eb7b7b + md5sum: d253fc4320d9b4d7fcfc43b2734412ee - path: output/last/lastdb/test.sds md5sum: cad9927d4bd161257e98165ad755d8e4 - path: output/last/lastdb/test.ssp diff --git a/tests/modules/minimap2/align/test.yml b/tests/modules/minimap2/align/test.yml index 484fa9f7..f9b762bb 100644 --- a/tests/modules/minimap2/align/test.yml +++ b/tests/modules/minimap2/align/test.yml @@ -5,7 +5,7 @@ - minimap2/align files: - path: ./output/minimap2/test.paf - md5sum: 5a9648fc67c30a2c83b0ef094171faa0 + md5sum: 70e8cf299ee3ecd33e629d10c1f588ce - name: minimap2 align paired-end command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c tests/config/nextflow.config @@ -14,4 +14,4 @@ - minimap2/align files: - path: ./output/minimap2/test.paf - md5sum: e7b952be872bdbef16bf99d512690df7 + md5sum: 5e7b55a26bf0ea3a2843423d3e0b9a28 diff --git a/tests/modules/picard/markduplicates/test.yml b/tests/modules/picard/markduplicates/test.yml index db3cf253..24f468ce 100644 --- a/tests/modules/picard/markduplicates/test.yml +++ b/tests/modules/picard/markduplicates/test.yml @@ -6,7 +6,7 @@ files: - path: ./output/picard/test.MarkDuplicates.metrics.txt - path: ./output/picard/test.bam - md5sum: fe8ed25b4bd25be0cc7a8730fc3b2f30 + md5sum: 3270bb142039e86aaf2ab83c540225d5 - name: picard markduplicates on unsorted bam command: nextflow run ./tests/modules/picard/markduplicates -entry 
test_picard_markduplicates_unsorted_bam -c tests/config/nextflow.config diff --git a/tests/modules/picard/mergesamfiles/test.yml b/tests/modules/picard/mergesamfiles/test.yml index be82034f..114c1f01 100644 --- a/tests/modules/picard/mergesamfiles/test.yml +++ b/tests/modules/picard/mergesamfiles/test.yml @@ -5,4 +5,4 @@ - picard/mergesamfiles files: - path: ./output/picard/test.bam - md5sum: b8bd6c22f36c6ebc91bca98bd637a2eb + md5sum: 82bb91735aff82eae4f0b631114e9e15 diff --git a/tests/modules/salmon/index/test.yml b/tests/modules/salmon/index/test.yml index 722cd24f..156bc5ca 100644 --- a/tests/modules/salmon/index/test.yml +++ b/tests/modules/salmon/index/test.yml @@ -8,7 +8,7 @@ - path: ./output/index/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - path: ./output/index/salmon/versionInfo.json - md5sum: 204865f645102587c4953fccb256797c + md5sum: 6c764bd219b7bc17168a99d232c0fe09 - path: ./output/index/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - path: ./output/index/salmon/mphf.bin diff --git a/tests/modules/salmon/quant/test.yml b/tests/modules/salmon/quant/test.yml index 5a1ebdd8..d7ed0d0f 100644 --- a/tests/modules/salmon/quant/test.yml +++ b/tests/modules/salmon/quant/test.yml @@ -9,7 +9,7 @@ md5sum: 687368b9963874c1797d210310b38516 - path: ./output/salmon/test/lib_format_counts.json - path: ./output/salmon/test/quant.genes.sf - md5sum: ad4d31437f06db49b2436abeec29c78e + md5sum: af6d88f109e0d0d6a0826bdf2b3b7e97 - path: ./output/salmon/test/logs/salmon_quant.log - path: ./output/salmon/test/aux_info/expected_bias.gz md5sum: 24ee10af39b41ecf4f4e08faaaf537ee @@ -27,7 +27,7 @@ - path: ./output/index/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - path: ./output/index/salmon/versionInfo.json - md5sum: 204865f645102587c4953fccb256797c + md5sum: 6c764bd219b7bc17168a99d232c0fe09 - path: ./output/index/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - path: ./output/index/salmon/mphf.bin @@ -60,7 +60,7 @@ md5sum: 4abd35d0a60b5279b394424f0e6ea42d - path: ./output/salmon/test/lib_format_counts.json - path: ./output/salmon/test/quant.genes.sf - md5sum: d750f8c9f248e30c3a6d0c2678bf9c6a + md5sum: 29c8cd26f609cacd4fb88713df9c71c2 - path: ./output/salmon/test/logs/salmon_quant.log - path: ./output/salmon/test/aux_info/expected_bias.gz md5sum: 24ee10af39b41ecf4f4e08faaaf537ee @@ -78,7 +78,7 @@ - path: ./output/index/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - path: ./output/index/salmon/versionInfo.json - md5sum: 204865f645102587c4953fccb256797c + md5sum: 6c764bd219b7bc17168a99d232c0fe09 - path: ./output/index/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - path: ./output/index/salmon/mphf.bin @@ -112,7 +112,7 @@ md5sum: 687368b9963874c1797d210310b38516 - path: ./output/salmon/test/lib_format_counts.json - path: ./output/salmon/test/quant.genes.sf - md5sum: ad4d31437f06db49b2436abeec29c78e + md5sum: af6d88f109e0d0d6a0826bdf2b3b7e97 - path: ./output/salmon/test/logs/salmon_quant.log - path: output/salmon/test/aux_info/expected_bias.gz md5sum: 24ee10af39b41ecf4f4e08faaaf537ee @@ -130,7 +130,7 @@ - path: output/index/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - path: output/index/salmon/versionInfo.json - md5sum: 204865f645102587c4953fccb256797c + md5sum: 6c764bd219b7bc17168a99d232c0fe09 - path: output/index/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - path: output/index/salmon/mphf.bin diff --git a/tests/modules/samtools/fastq/test.yml 
b/tests/modules/samtools/fastq/test.yml index 3fdc0ef6..bfcf5c92 100644 --- a/tests/modules/samtools/fastq/test.yml +++ b/tests/modules/samtools/fastq/test.yml @@ -5,6 +5,6 @@ - samtools/fastq files: - path: output/samtools/test_2.fastq.gz - md5sum: 229daf1a62d114cae42c65801e8c0114 + md5sum: 3b1c92f33a44a78d82f8360ab4fdfd61 - path: output/samtools/test_1.fastq.gz - md5sum: 4cab81f76e66361611621377f1b69d1d + md5sum: 5a3f9c69a032c4ffd9071ea31a14e6f9 diff --git a/tests/modules/samtools/flagstat/test.yml b/tests/modules/samtools/flagstat/test.yml index 18671e25..0da6c2f4 100644 --- a/tests/modules/samtools/flagstat/test.yml +++ b/tests/modules/samtools/flagstat/test.yml @@ -5,4 +5,4 @@ - samtools/flagstat files: - path: ./output/samtools/test.paired_end.sorted.bam.flagstat - md5sum: 6d7934c303b15ce473f64d502b79984e + md5sum: 4f7ffd1e6a5e85524d443209ac97d783 diff --git a/tests/modules/samtools/mpileup/test.yml b/tests/modules/samtools/mpileup/test.yml index 31d35c0f..25c39d63 100644 --- a/tests/modules/samtools/mpileup/test.yml +++ b/tests/modules/samtools/mpileup/test.yml @@ -5,4 +5,4 @@ - samtools/mpileup files: - path: ./output/samtools/test.mpileup - md5sum: 3608af83ffe3efbb1337f0ffb205337d + md5sum: 958e6bead4103d72026f80153b6b5150 diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index 2ce00247..477574fe 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -5,4 +5,4 @@ - samtools/sort files: - path: output/samtools/test.bam - md5sum: 3997667dee6b45d682865c6bf82d0378 + md5sum: bdc2d9e3f579f84df1e242207b627f89 diff --git a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index 32854c05..cf44b846 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -5,4 +5,4 @@ - samtools/stats files: - path: ./output/samtools/test.paired_end.sorted.bam.stats - md5sum: 95f7edae5d02c10c4004d9ab1d7d8ef3 + md5sum: a7f36cf11fd3bf97e0a0ae29c0627296 diff --git a/tests/modules/spades/test.yml b/tests/modules/spades/test.yml index eeff24cd..35beb1a7 100644 --- a/tests/modules/spades/test.yml +++ b/tests/modules/spades/test.yml @@ -4,11 +4,11 @@ - spades files: - path: output/spades/test.assembly.gfa - md5sum: b2616d2beba83ab7d361b54778d1e759 + md5sum: a995d1d413031534180d2b3b715fa921 - path: output/spades/test.contigs.fa - md5sum: 2690fefde046bc904e90df09a065257a + md5sum: 65ba6a517c152dbe219bf4b5b92bdad7 - path: output/spades/test.scaffolds.fa - md5sum: 2690fefde046bc904e90df09a065257a + md5sum: 65ba6a517c152dbe219bf4b5b92bdad7 - path: output/spades/test.spades.log - name: spades paired end @@ -17,7 +17,7 @@ - spades files: - path: output/spades/test.assembly.gfa - md5sum: faf76135ee390606b899c0197dc38e04 + md5sum: bb053ef4e9250829c980ca17fbdbe3e9 - path: output/spades/test.contigs.fa - md5sum: 6148e25b33890c80f176f90f2dd88989 + md5sum: 4476d409da70d9f7fc2aa8f25bbaf7fd - path: output/spades/test.spades.log diff --git a/tests/modules/tabix/bgzip/test.yml b/tests/modules/tabix/bgzip/test.yml index e254daed..58412979 100644 --- a/tests/modules/tabix/bgzip/test.yml +++ b/tests/modules/tabix/bgzip/test.yml @@ -5,4 +5,4 @@ - tabix/bgzip files: - path: ./output/tabix/test.vcf.gz - md5sum: 0f1c94af3aa3e7e203d9e034ef6f8f4d + md5sum: fc178eb342a91dc0d1d568601ad8f8e2 diff --git a/tests/modules/tabix/bgziptabix/test.yml b/tests/modules/tabix/bgziptabix/test.yml index c0b9c247..31048109 100644 --- a/tests/modules/tabix/bgziptabix/test.yml +++ 
b/tests/modules/tabix/bgziptabix/test.yml @@ -5,6 +5,6 @@ - tabix/bgziptabix files: - path: ./output/tabix/test.gz - md5sum: 0f1c94af3aa3e7e203d9e034ef6f8f4d + md5sum: fc178eb342a91dc0d1d568601ad8f8e2 - path: ./output/tabix/test.gz.tbi - md5sum: bbec39fd53cf2834909d52094980d094 + md5sum: 36e11bf96ed0af4a92caa91a68612d64 diff --git a/tests/modules/tabix/tabix/test.yml b/tests/modules/tabix/tabix/test.yml index 41c4a940..646215c8 100644 --- a/tests/modules/tabix/tabix/test.yml +++ b/tests/modules/tabix/tabix/test.yml @@ -5,7 +5,7 @@ - tabix/tabix files: - path: ./output/tabix/test.bed.gz.tbi - md5sum: 115922d881d24879b15d20c3734495ac + md5sum: 5b40851ab6b8ccf7946313c86481c0df - name: tabix tabix gff command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_gff -c tests/config/nextflow.config tags: @@ -13,7 +13,7 @@ - tabix/tabix files: - path: ./output/tabix/genome.gff3.gz.tbi - md5sum: 4059fe4762568194cf293fc6df7b358b + md5sum: f79a67d95a98076e04fbe0455d825926 - name: tabix tabix vcf command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c tests/config/nextflow.config tags: @@ -21,4 +21,4 @@ - tabix/tabix files: - path: output/tabix/test.vcf.gz.tbi - md5sum: bbec39fd53cf2834909d52094980d094 + md5sum: 36e11bf96ed0af4a92caa91a68612d64 From 29c847424034eb04765d7378fb384ad3094a66a6 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Wed, 28 Jul 2021 19:49:50 +0100 Subject: [PATCH 034/314] Update Nextclade to output all files by default (#638) --- modules/nextclade/main.nf | 25 +++++++++--------------- modules/nextclade/meta.yml | 7 +------ tests/modules/nextclade/main.nf | 33 +++++++------------------------- tests/modules/nextclade/test.yml | 22 ++------------------- 4 files changed, 19 insertions(+), 68 deletions(-) diff --git a/modules/nextclade/main.nf b/modules/nextclade/main.nf index 24ca7309..8319f6b1 100755 --- a/modules/nextclade/main.nf +++ b/modules/nextclade/main.nf @@ -20,35 +20,28 @@ process NEXTCLADE { input: tuple val(meta), path(fasta) - val output_format output: - tuple val(meta), path("${prefix}.csv") , optional:true, emit: csv - tuple val(meta), path("${prefix}.json") , optional:true, emit: json - tuple val(meta), path("${prefix}.tree.json") , optional:true, emit: json_tree - tuple val(meta), path("${prefix}.tsv") , optional:true, emit: tsv + tuple val(meta), path("${prefix}.csv") , emit: csv + tuple val(meta), path("${prefix}.json") , emit: json + tuple val(meta), path("${prefix}.tree.json") , emit: json_tree + tuple val(meta), path("${prefix}.tsv") , emit: tsv tuple val(meta), path("${prefix}.clades.tsv"), optional:true, emit: tsv_clades path "*.version.txt" , emit: version script: def software = getSoftwareName(task.process) prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" - def format = output_format - if (!(format in ['json', 'csv', 'tsv', 'tree', 'tsv-clades-only'])) { - format = 'json' - } - def extension = format - if (format in ['tsv-clades-only']) { - extension = '.clades.tsv' - } else if (format in ['tree']) { - extension = 'tree.json' - } """ nextclade \\ $options.args \\ --jobs $task.cpus \\ --input-fasta $fasta \\ - --output-${format} ${prefix}.${extension} + --output-json ${prefix}.json \\ + --output-csv ${prefix}.csv \\ + --output-tsv ${prefix}.tsv \\ + --output-tsv-clades-only ${prefix}.clades.tsv \\ + --output-tree ${prefix}.tree.json echo \$(nextclade --version 2>&1) > ${software}.version.txt """ diff --git a/modules/nextclade/meta.yml b/modules/nextclade/meta.yml index 8e0eabd7..d321e08f 100755 --- a/modules/nextclade/meta.yml +++ b/modules/nextclade/meta.yml @@ -11,7 +11,7 @@ tools: documentation: None tool_dev_url: https://github.com/nextstrain/nextclade doi: "" - licence: ['MIT'] + licence: ["MIT"] input: - meta: @@ -23,11 +23,6 @@ input: type: file description: FASTA file containing one or more consensus sequences pattern: "*.{fasta,fa}" - - output_format: - type: string - description: | - String for output format supported by nextclade - i.e one of 'json', 'csv', 'tsv', 'tree', 'tsv-clades-only' output: - meta: diff --git a/tests/modules/nextclade/main.nf b/tests/modules/nextclade/main.nf index fe8f72c9..93c50ca5 100755 --- a/tests/modules/nextclade/main.nf +++ b/tests/modules/nextclade/main.nf @@ -4,30 +4,11 @@ nextflow.enable.dsl = 2 include { NEXTCLADE } from '../../../modules/nextclade/main.nf' addParams( options: [:] ) -workflow test_nextclade_json { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - ] - NEXTCLADE ( input, 'json' ) -} - -workflow test_nextclade_csv { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - ] - NEXTCLADE ( input, 'csv' ) -} - -workflow test_nextclade_tsv { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - ] - NEXTCLADE ( input, 'tsv' ) -} - -workflow test_nextclade_tree { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - ] - NEXTCLADE ( input, 'tree' ) +workflow test_nextclade { + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] + + NEXTCLADE ( input ) } diff --git a/tests/modules/nextclade/test.yml b/tests/modules/nextclade/test.yml index 9826dbad..4d1d7743 100755 --- a/tests/modules/nextclade/test.yml +++ b/tests/modules/nextclade/test.yml @@ -1,31 +1,13 @@ -- name: nextclade test_nextclade_json - command: nextflow run tests/modules/nextclade -entry test_nextclade_json -c tests/config/nextflow.config +- name: nextclade test_nextclade + command: nextflow run tests/modules/nextclade -entry test_nextclade -c tests/config/nextflow.config tags: - nextclade files: - path: output/nextclade/test.json md5sum: cab92830c5cb66076e7d6c054ea98362 - -- name: nextclade test_nextclade_csv - command: nextflow run tests/modules/nextclade -entry test_nextclade_csv -c tests/config/nextflow.config - tags: - - nextclade - files: - path: output/nextclade/test.csv md5sum: 4f7096df9be51f99a0d62a38653b29cf - -- name: nextclade 
test_nextclade_tsv - command: nextflow run tests/modules/nextclade -entry test_nextclade_tsv -c tests/config/nextflow.config - tags: - - nextclade - files: - path: output/nextclade/test.tsv md5sum: fe07dc4ffcd81742ca9bef93f88e8836 - -- name: nextclade test_nextclade_tree - command: nextflow run tests/modules/nextclade -entry test_nextclade_tree -c tests/config/nextflow.config - tags: - - nextclade - files: - path: output/nextclade/test.tree.json md5sum: 5c57dd724bc2b5cfde8f42a17ff2865a From 2a7c60e9652fc37ad39e784f1b87b218fae8c94c Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Sat, 31 Jul 2021 11:30:26 +0200 Subject: [PATCH 035/314] Guidelines update: describe workaround for modules called `build` (#647) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Update README.md --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 7239b24d..02638474 100644 --- a/README.md +++ b/README.md @@ -381,6 +381,8 @@ Please follow the steps below to run the tests locally: - See [docs on running pytest-workflow](https://pytest-workflow.readthedocs.io/en/stable/#running-pytest-workflow) for more info. +> :warning: if you have a module named `build` this can conflict with some pytest internal behaviour. This results in no tests being run (i.e. receiving a message of `collected 0 items`). In this case rename the `tests/<tool>/build` directory to `tests/<tool>/build_test`, and update the corresponding `test.yml` accordingly. An example can be seen with the [`bowtie2/build` module tests](https://github.com/nf-core/modules/tree/master/tests/modules/bowtie2/build_test). + ### Uploading to `nf-core/modules` [Fork](https://help.github.com/articles/fork-a-repo/) the `nf-core/modules` repository to your own GitHub account. Within the local clone of your fork add the module file to the [`modules/`](modules) directory. Please try and keep PRs as atomic as possible to aid the reviewing process - ideally, one module addition/update per PR. From e01a98a7048a4e648b73c880586e9f4f39fd542c Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 2 Aug 2021 09:40:57 +0200 Subject: [PATCH 036/314] module: unzip (#642) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Add unzip module * Remove missing TODOs update mtea * Apply changes after code-review from @grst * Account for user trying to supply two input archives * Remove debugging test * Update modules/unzip/main.nf Co-authored-by: Jose Espinosa-Carrasco * Correct output path Co-authored-by: Jose Espinosa-Carrasco --- modules/unzip/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/unzip/main.nf | 43 +++++++++++++++++++++ modules/unzip/meta.yml | 31 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 1 + tests/modules/unzip/main.nf | 12 ++++++ tests/modules/unzip/test.yml | 7 ++++ 7 files changed, 166 insertions(+) create mode 100644 modules/unzip/functions.nf create mode 100644 modules/unzip/main.nf create mode 100644 modules/unzip/meta.yml create mode 100644 tests/modules/unzip/main.nf create mode 100644 tests/modules/unzip/test.yml diff --git a/modules/unzip/functions.nf b/modules/unzip/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/unzip/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf new file mode 100644 index 00000000..b52fbb04 --- /dev/null +++ b/modules/unzip/main.nf @@ -0,0 +1,43 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process UNZIP { + tag "$archive" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + + conda (params.enable_conda ? "bioconda::p7zip=15.09" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/p7zip:15.09--h2d50403_4" + } else { + container "quay.io/biocontainers/p7zip:15.09--h2d50403_4" + } + + input: + path archive + + output: + path "${archive.baseName}/" , emit: unzipped_archive + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + + if ( archive instanceof List && archive.name.size > 1 ) { exit 1, "[UNZIP] error: 7za only accepts a single archive as input. Please check module input." } + + """ + 7za \\ + e \\ + -o"${archive.baseName}"/ \\ + $options.args \\ + $archive + + echo \$(7za --help) | grep Version | sed 's/.*p7zip Version//; s/(.*//' 1> ${software}.version.txt + """ +} diff --git a/modules/unzip/meta.yml b/modules/unzip/meta.yml new file mode 100644 index 00000000..97b1f1fc --- /dev/null +++ b/modules/unzip/meta.yml @@ -0,0 +1,31 @@ +name: unzip +description: Unzip ZIP archive files +keywords: + - unzip + - decompression +tools: + - unzip: + description: p7zip is a quick port of 7z.exe and 7za.exe (command line version of 7zip, see www.7-zip.org) for Unix. 
+ homepage: https://sourceforge.net/projects/p7zip/ + documentation: https://sourceforge.net/projects/p7zip/ + tool_dev_url: "https://sourceforge.net/projects/p7zip" + licence: "GNU LGPL" + +input: + - archive: + type: file + description: ZIP file + pattern: "*.zip" + +output: + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - unzipped_archive: + type: directory + description: Directory contents of the unzipped archive + pattern: '${archive.baseName}/' + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 885978df..343c9813 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -835,6 +835,10 @@ untar: - modules/untar/** - tests/modules/untar/** +unzip: + - modules/unzip/** + - tests/modules/unzip/** + variantbam: - modules/variantbam/** - tests/modules/variantbam/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 3f86d7ba..5643c364 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -7,6 +7,7 @@ params { 'genome' { genome_fasta = "${test_data_dir}/genomics/sarscov2/genome/genome.fasta" genome_fasta_fai = "${test_data_dir}/genomics/sarscov2/genome/genome.fasta.fai" + genome_fasta_zip = "${test_data_dir}/genomics/sarscov2/genome/genome.fasta.zip" genome_dict = "${test_data_dir}/genomics/sarscov2/genome/genome.dict" genome_gff3 = "${test_data_dir}/genomics/sarscov2/genome/genome.gff3" genome_gff3_gz = "${test_data_dir}/genomics/sarscov2/genome/genome.gff3.gz" diff --git a/tests/modules/unzip/main.nf b/tests/modules/unzip/main.nf new file mode 100644 index 00000000..b7f668b1 --- /dev/null +++ b/tests/modules/unzip/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNZIP } from '../../../modules/unzip/main.nf' addParams( options: [:] ) + +workflow test_unzip { + + archive = file(params.test_data['sarscov2']['genome']['genome_fasta_zip'], checkIfExists: true) + + UNZIP ( archive ) +} diff --git a/tests/modules/unzip/test.yml b/tests/modules/unzip/test.yml new file mode 100644 index 00000000..93066eb0 --- /dev/null +++ b/tests/modules/unzip/test.yml @@ -0,0 +1,7 @@ +- name: unzip + command: nextflow run ./tests/modules/unzip -entry test_unzip -c tests/config/nextflow.config + tags: + - unzip + files: + - path: output/unzip/genome.fasta/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 From 8a2f01c4168ef1f5ceb5ab4206a8545423cefc9b Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 2 Aug 2021 15:21:23 +0200 Subject: [PATCH 037/314] module: bwa/samse (#626) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Add bwa/aln module * Also output reads as required with SAI * Add bwa samse * Fix container paths * remove TODO comment * Updated based on code from from @grst on bwa/sampe * Clarify output docs --- modules/bwa/samse/functions.nf | 68 ++++++++++++++++++++++++++++++++ modules/bwa/samse/main.nf | 46 +++++++++++++++++++++ modules/bwa/samse/meta.yml | 59 +++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bwa/samse/main.nf | 19 +++++++++ tests/modules/bwa/samse/test.yml | 8 ++++ 6 files changed, 204 insertions(+) create mode 100644 modules/bwa/samse/functions.nf create mode 100644 modules/bwa/samse/main.nf create mode 100644 modules/bwa/samse/meta.yml create mode 100644 tests/modules/bwa/samse/main.nf create mode 100644 tests/modules/bwa/samse/test.yml diff --git a/modules/bwa/samse/functions.nf b/modules/bwa/samse/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bwa/samse/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf new file mode 100644 index 00000000..3fe8bdd8 --- /dev/null +++ b/modules/bwa/samse/main.nf @@ -0,0 +1,46 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BWA_SAMSE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" + } else { + container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" + } + + input: + tuple val(meta), path(reads), path(sai) + path index + + output: + tuple val(meta), path("*.bam"), emit: bam + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def read_group = meta.read_group ? "-r ${meta.read_group}" : "" + + """ + INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` + + bwa samse \\ + $options.args \\ + $read_group \\ + \$INDEX \\ + $sai \\ + $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam + + echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + """ +} diff --git a/modules/bwa/samse/meta.yml b/modules/bwa/samse/meta.yml new file mode 100644 index 00000000..89917703 --- /dev/null +++ b/modules/bwa/samse/meta.yml @@ -0,0 +1,59 @@ +name: bwa_samse +description: Convert bwa SA coordinate file to SAM format +keywords: + - bwa + - aln + - short-read + - align + - reference + - fasta + - map + - sam + - bam + +tools: + - bwa: + description: | + BWA is a software package for mapping DNA sequences against + a large reference genome, such as the human genome. + homepage: http://bio-bwa.sourceforge.net/ + documentation: http://bio-bwa.sourceforge.net/ + doi: "10.1093/bioinformatics/btp324" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information. + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ files specified alongside meta in input channel. + pattern: "*.{fastq,fq}.gz" + - sai: + type: file + description: SAI file specified alongside meta and reads in input channel. + pattern: "*.sai" + - index: + type: directory + description: Directory containing BWA index files (amb,ann,bwt,pac,sa) from BWA_INDEX + pattern: "bwa/" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bam: + type: file + description: BAM file + pattern: "*.bam" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 343c9813..150549d3 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -170,6 +170,10 @@ bwa/mem: - modules/bwa/mem/** - tests/modules/bwa/mem/** +bwa/samse: + - modules/bwa/samse/** + - tests/modules/bwa/samse/** + bwamem2/index: - modules/bwamem2/index/** - tests/modules/bwamem2/index/** diff --git a/tests/modules/bwa/samse/main.nf b/tests/modules/bwa/samse/main.nf new file mode 100644 index 00000000..5a5d8d2b --- /dev/null +++ b/tests/modules/bwa/samse/main.nf @@ -0,0 +1,19 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) +include { BWA_SAMSE } from '../../../../modules/bwa/samse/main.nf' addParams( options: [:] ) + +workflow test_bwa_samse { + + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_ALN ( input, BWA_INDEX.out.index ) + BWA_SAMSE ( BWA_ALN.out.sai, BWA_INDEX.out.index ) +} diff --git a/tests/modules/bwa/samse/test.yml b/tests/modules/bwa/samse/test.yml new file mode 100644 index 00000000..597844d4 --- /dev/null +++ b/tests/modules/bwa/samse/test.yml @@ -0,0 +1,8 @@ +- name: bwa samse + command: nextflow run ./tests/modules/bwa/samse -entry test_bwa_samse -c tests/config/nextflow.config + tags: + - bwa + - bwa/samse + files: + - path: output/bwa/test.bam + md5sum: 27eb91146e45dee65664c18596be4262 From 4f1c1601cf73329dc717cbd3db95ffa82c97b86d Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 2 Aug 2021 15:37:48 +0200 Subject: [PATCH 038/314] module: bwa/sampe (#625) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Add bwa/aln module * Also output reads as required with SAI * Add sampe * Fix container paths * Update based on code review from @grst * Update input docs --- modules/bwa/sampe/functions.nf | 68 ++++++++++++++++++++++++++++++++ modules/bwa/sampe/main.nf | 46 +++++++++++++++++++++ modules/bwa/sampe/meta.yml | 58 +++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bwa/sampe/main.nf | 20 ++++++++++ tests/modules/bwa/sampe/test.yml | 8 ++++ 6 files changed, 204 insertions(+) create mode 100644 modules/bwa/sampe/functions.nf create mode 100644 modules/bwa/sampe/main.nf create mode 100644 modules/bwa/sampe/meta.yml create mode 100644 tests/modules/bwa/sampe/main.nf create mode 100644 tests/modules/bwa/sampe/test.yml diff --git a/modules/bwa/sampe/functions.nf b/modules/bwa/sampe/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bwa/sampe/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf new file mode 100644 index 00000000..7a724908 --- /dev/null +++ b/modules/bwa/sampe/main.nf @@ -0,0 +1,46 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BWA_SAMPE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" + } else { + container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" + } + + input: + tuple val(meta), path(reads), path(sai) + path index + + output: + tuple val(meta), path("*.bam"), emit: bam + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def read_group = meta.read_group ? "-r ${meta.read_group}" : "" + + """ + INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` + + bwa sampe \\ + $options.args \\ + $read_group \\ + \$INDEX \\ + $sai \\ + $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam + + echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + """ +} diff --git a/modules/bwa/sampe/meta.yml b/modules/bwa/sampe/meta.yml new file mode 100644 index 00000000..6dc1bcc5 --- /dev/null +++ b/modules/bwa/sampe/meta.yml @@ -0,0 +1,58 @@ +name: bwa_sampe +description: Convert paired-end bwa SA coordinate files to SAM format +keywords: + - bwa + - aln + - short-read + - align + - reference + - fasta + - map + - sam + - bam +tools: + - bwa: + description: | + BWA is a software package for mapping DNA sequences against + a large reference genome, such as the human genome. + homepage: http://bio-bwa.sourceforge.net/ + documentation: http://bio-bwa.sourceforge.net/ + doi: "10.1093/bioinformatics/btp324" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information. + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ files specified alongside meta in input channel. + pattern: "*.{fastq,fq}.gz" + - sai: + type: file + description: SAI file specified alongside meta and reads in input channel. + pattern: "*.sai" + - index: + type: directory + description: Directory containing BWA index files (amb,ann,bwt,pac,sa) from BWA_INDEX + pattern: "bwa/" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bam: + type: file + description: BAM file + pattern: "*.bam" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 150549d3..2d410e7f 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -170,6 +170,10 @@ bwa/mem: - modules/bwa/mem/** - tests/modules/bwa/mem/** +bwa/sampe: + - modules/bwa/sampe/** + - tests/modules/bwa/sampe/** + bwa/samse: - modules/bwa/samse/** - tests/modules/bwa/samse/** diff --git a/tests/modules/bwa/sampe/main.nf b/tests/modules/bwa/sampe/main.nf new file mode 100644 index 00000000..86b019b5 --- /dev/null +++ b/tests/modules/bwa/sampe/main.nf @@ -0,0 +1,20 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) +include { BWA_SAMPE } from '../../../../modules/bwa/sampe/main.nf' addParams( options: [:] ) + +workflow test_bwa_sampe { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_ALN ( input, BWA_INDEX.out.index ) + BWA_SAMPE ( BWA_ALN.out.sai, BWA_INDEX.out.index ) +} diff --git a/tests/modules/bwa/sampe/test.yml b/tests/modules/bwa/sampe/test.yml new file mode 100644 index 00000000..ba5e704d --- /dev/null +++ b/tests/modules/bwa/sampe/test.yml @@ -0,0 +1,8 @@ +- name: bwa sampe + command: nextflow run ./tests/modules/bwa/sampe -entry test_bwa_sampe -c tests/config/nextflow.config + tags: + - bwa + - bwa/sampe + files: + - path: output/bwa/test.bam + md5sum: f6ad85d66d44c5d26e692109d2e34100 From 6913da9d2dd73b20c6c939a77ee174165001eac2 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Tue, 3 Aug 2021 16:24:19 +0200 Subject: [PATCH 039/314] module: MALT/BUILD (#645) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Start MALT-build * Start MALT build (missing meta files and outputs specS) * Local tests * Correct test map_type * Finished module, just waiting for UNZIP module to finalise tests * Correct tests in preparation for Unzip * Ouptut log file too * Update meta.yml * Rename log file * Rename log file * Remove debugging stuff * Add Unzip module * Linting update * Linting update * Fix input db * Fix db file in cmd * Update modules/malt/build/main.nf * Update modules/malt/build/main.nf * Update main.nf --- .nf-core.yml | 1 + modules/malt/build/functions.nf | 68 ++++++++++++++++++++++++++ modules/malt/build/main.nf | 58 ++++++++++++++++++++++ modules/malt/build/meta.yml | 55 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/malt/build | 1 + tests/modules/malt/build_test/main.nf | 26 ++++++++++ tests/modules/malt/build_test/test.yml | 48 ++++++++++++++++++ 8 files changed, 261 insertions(+) create mode 100644 modules/malt/build/functions.nf create mode 100644 modules/malt/build/main.nf create mode 100644 modules/malt/build/meta.yml create mode 120000 tests/modules/malt/build create mode 100644 tests/modules/malt/build_test/main.nf create mode 100644 tests/modules/malt/build_test/test.yml diff --git a/.nf-core.yml b/.nf-core.yml index 72971af8..4f3bae33 100644 --- a/.nf-core.yml +++ b/.nf-core.yml @@ -7,3 +7,4 @@ bump-versions: rseqc/inferexperiment: False rseqc/innerdistance: False sortmerna: False + malt/build: False diff --git a/modules/malt/build/functions.nf b/modules/malt/build/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/malt/build/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/malt/build/main.nf b/modules/malt/build/main.nf new file mode 100644 index 00000000..efadf9cf --- /dev/null +++ b/modules/malt/build/main.nf @@ -0,0 +1,58 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MALT_BUILD { + + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + // Do not **auto-bump** due to problem with change of version numbering between 0.4.1 and 0.5.2 + // (originally 0.4.1 was listed as 0.41, so is always selected as 'latest' even though it is not!) + conda (params.enable_conda ? "bioconda::malt=0.5.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/malt:0.5.2--0" + } else { + container "quay.io/biocontainers/malt:0.5.2--0" + } + + input: + path fastas + val seq_type + path gff + path map_db + + output: + path "malt_index/" , emit: index + path "*.version.txt" , emit: version + path "malt-build.log", emit: log + + script: + def software = getSoftwareName(task.process) + def avail_mem = 6 + if (!task.memory) { + log.info '[MALT_BUILD] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + def igff = gff ? "-igff ${gff}" : "" + + """ + malt-build \\ + -J-Xmx${avail_mem}g \\ + -v \\ + --input ${fastas.join(' ')} \\ + -s $seq_type \\ + $igff \\ + -d 'malt_index/' \\ + -t ${task.cpus} \\ + $options.args \\ + -mdb ${map_db}/*.db |&tee malt-build.log + + malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2 > ${software}.version.txt + """ +} diff --git a/modules/malt/build/meta.yml b/modules/malt/build/meta.yml new file mode 100644 index 00000000..5ace4d29 --- /dev/null +++ b/modules/malt/build/meta.yml @@ -0,0 +1,55 @@ +name: malt_build +description: MALT, an acronym for MEGAN alignment tool, is a sequence alignment and analysis tool designed for processing high-throughput sequencing data, especially in the context of metagenomics. 
+keywords: + - malt + - alignment + - metagenomics + - ancient DNA + - aDNA + - palaeogenomics + - archaeogenomics + - microbiome + - database +tools: + - malt: + description: A tool for mapping metagenomic data + homepage: https://www.wsi.uni-tuebingen.de/lehrstuehle/algorithms-in-bioinformatics/software/malt/ + documentation: https://software-ab.informatik.uni-tuebingen.de/download/malt/manual.pdf + tool_dev_url: None + doi: "10.1038/s41559-017-0446-6" + licence: ['GPL v3'] + +input: + - fastas: + type: file + description: Directory of, or FASTA reference files for indexing + pattern: "*/|*.fasta" + - seq_type: + type: string + description: Type of input data + pattern: "DNA|Protein" + - gff: + type: file + description: Directory of, or GFF3 files of input FASTA files + pattern: "*/|*.gff|*.gff3" + - map_db: + type: file + description: MEGAN .db file from https://software-ab.informatik.uni-tuebingen.de/download/megan6/welcome.html + pattern: + +output: + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - index: + type: directory + description: Directory containing MALT database index directory + pattern: "malt_index/" + - log: + type: file + description: Log file from STD out of malt-build + pattern: "malt-build.log" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 2d410e7f..dffe106a 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -474,6 +474,10 @@ lofreq/indelqual: - modules/lofreq/indelqual/** - tests/modules/lofreq/indelqual/** +malt/build: + - modules/malt/build/** + - tests/modules/malt/build_test/** + mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** diff --git a/tests/modules/malt/build b/tests/modules/malt/build new file mode 120000 index 00000000..942cadb7 --- /dev/null +++ b/tests/modules/malt/build @@ -0,0 +1 @@ +build_test/ \ No newline at end of file diff --git a/tests/modules/malt/build_test/main.nf b/tests/modules/malt/build_test/main.nf new file mode 100644 index 00000000..b2f3eaf6 --- /dev/null +++ b/tests/modules/malt/build_test/main.nf @@ -0,0 +1,26 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNZIP } from '../../../../modules/unzip/main.nf' addParams( options: [:] ) +include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' addParams( options: [:] ) + +workflow test_malt_build { + fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + seq_type = "DNA" + gff = [] + map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) + + UNZIP ( map_db ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive ) +} + +workflow test_malt_build_gff { + fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + seq_type = "DNA" + gff = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) + map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) + + UNZIP ( map_db ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive ) +} diff --git a/tests/modules/malt/build_test/test.yml b/tests/modules/malt/build_test/test.yml new file mode 100644 index 00000000..23ca4550 --- /dev/null +++ b/tests/modules/malt/build_test/test.yml @@ -0,0 +1,48 @@ +- name: malt build + command: nextflow run ./tests/modules/malt/build_test -entry 
test_malt_build -c tests/config/nextflow.config + tags: + - malt + - malt/build + files: + - path: output/malt/malt_index/index0.idx + md5sum: 1954f2c00b418d00112829b0a6adb8ce + - path: output/malt/malt_index/ref.db + md5sum: 772a09aeb162515485b037604399f2bd + - path: output/malt/malt_index/ref.idx + md5sum: 7dea362b3fac8e00956a4952a3d4f474 + - path: output/malt/malt_index/ref.inf + md5sum: b146842067cf278ef1d23e6c2e7c0c35 + - path: output/malt/malt_index/table0.db + - path: output/malt/malt_index/table0.idx + - path: output/malt/malt_index/taxonomy.idx + md5sum: bb335e7c378a5bd85761b6eeed16d984 + - path: output/malt/malt_index/taxonomy.map + md5sum: ae2ea08b2119eba932a9cbcd9e634917 + - path: output/malt/malt_index/taxonomy.tre + md5sum: 511ec8ff4fd8aaa20d59b5a91ed4e852 +- name: malt build gff + command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build_gff -c tests/config/nextflow.config + tags: + - malt + - malt/build + files: + - path: output/malt/malt_index/aadd.dbx + md5sum: 4e2ed57e713d5372bd09350f447cdf53 + - path: output/malt/malt_index/aadd.idx + md5sum: 0994061bc8673ebd283fa6546c3dd12c + - path: output/malt/malt_index/index0.idx + md5sum: 1954f2c00b418d00112829b0a6adb8ce + - path: output/malt/malt_index/ref.db + md5sum: 772a09aeb162515485b037604399f2bd + - path: output/malt/malt_index/ref.idx + md5sum: 7dea362b3fac8e00956a4952a3d4f474 + - path: output/malt/malt_index/ref.inf + md5sum: b146842067cf278ef1d23e6c2e7c0c35 + - path: output/malt/malt_index/table0.db + - path: output/malt/malt_index/table0.idx + - path: output/malt/malt_index/taxonomy.idx + md5sum: bb335e7c378a5bd85761b6eeed16d984 + - path: output/malt/malt_index/taxonomy.map + md5sum: ae2ea08b2119eba932a9cbcd9e634917 + - path: output/malt/malt_index/taxonomy.tre + md5sum: 511ec8ff4fd8aaa20d59b5a91ed4e852 From 292e8eceb9616b5b85df28fd72231a3aa73d50a5 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Wed, 4 Aug 2021 11:06:08 +0200 Subject: [PATCH 040/314] module: MALT/RUN (#646) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Add MALT with incomplete tests * Parameter typo fix * Clean up test yaml * Finish MALT module prior UNZIP and MALT_BUILD modiules * Add required modules for tests * Sync test out with malt-build * Fix input parameters in tests based on final build module * Update modules/malt/run/meta.yml Co-authored-by: Gregor Sturm Co-authored-by: Gregor Sturm --- modules/malt/run/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/malt/run/main.nf | 54 ++++++++++++++++++++++++++ modules/malt/run/meta.yml | 53 +++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/malt/run/main.nf | 21 ++++++++++ tests/modules/malt/run/test.yml | 11 ++++++ 6 files changed, 211 insertions(+) create mode 100644 modules/malt/run/functions.nf create mode 100644 modules/malt/run/main.nf create mode 100644 modules/malt/run/meta.yml create mode 100644 tests/modules/malt/run/main.nf create mode 100644 tests/modules/malt/run/test.yml diff --git a/modules/malt/run/functions.nf b/modules/malt/run/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/malt/run/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf new file mode 100644 index 00000000..7b327d6e --- /dev/null +++ b/modules/malt/run/main.nf @@ -0,0 +1,54 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MALT_RUN { + + label 'process_high_memory' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "bioconda::malt=0.5.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/malt:0.5.2--0" + } else { + container "quay.io/biocontainers/malt:0.5.2--0" + } + + input: + path fastqs + val mode + path index + + output: + path "*.rma6" , emit: rma6 + path "*.{tab,text,sam}", optional:true, emit: alignments + path "*.log" , emit: log + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def avail_mem = 6 + if (!task.memory) { + log.info '[MALT_RUN] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + + """ + malt-run \\ + -J-Xmx${avail_mem}g \\ + -t ${task.cpus} \\ + -v \\ + -o . \\ + $options.args \\ + --inFile ${fastqs.join(' ')} \\ + -m $mode \\ + --index $index/ |&tee malt-run.log + + echo \$(malt-run --help 2>&1) | grep -o 'version.* ' | cut -f 1 -d ',' | cut -f2 -d ' ' > ${software}.version.txt + """ +} diff --git a/modules/malt/run/meta.yml b/modules/malt/run/meta.yml new file mode 100644 index 00000000..30421a48 --- /dev/null +++ b/modules/malt/run/meta.yml @@ -0,0 +1,53 @@ +name: malt_run +description: MALT, an acronym for MEGAN alignment tool, is a sequence alignment and analysis tool designed for processing high-throughput sequencing data, especially in the context of metagenomics. 
+keywords: + - malt + - alignment + - metagenomics + - ancient DNA + - aDNA + - palaeogenomics + - archaeogenomics + - microbiome +tools: + - malt: + description: A tool for mapping metagenomic data + homepage: https://www.wsi.uni-tuebingen.de/lehrstuehle/algorithms-in-bioinformatics/software/malt/ + documentation: https://software-ab.informatik.uni-tuebingen.de/download/malt/manual.pdf + tool_dev_url: None + doi: "10.1038/s41559-017-0446-6" + licence: ['GPL v3'] + +input: + - fastqs: + type: file + description: Input FASTQ files + pattern: "*.{fastq.gz,fq.gz}" + - mode: + type: string + description: Program mode + pattern: 'Unknown|BlastN|BlastP|BlastX|Classifier' + - index: + type: directory + description: Index/database directory from malt-build + pattern: '*/' +output: + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - rma6: + type: file + description: MEGAN6 RMA6 file + pattern: "*.rma6" + - sam: + type: file + description: Alignment files in Tab, Text or MEGAN-compatible SAM format + pattern: "*.{tab,txt,sam}" + - log: + type: file + description: Log of verbose MALT stdout + pattern: "malt-run.log" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index dffe106a..39b267c7 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -478,6 +478,10 @@ malt/build: - modules/malt/build/** - tests/modules/malt/build_test/** +malt/run: + - modules/malt/run/** + - tests/modules/malt/run/** + mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** diff --git a/tests/modules/malt/run/main.nf b/tests/modules/malt/run/main.nf new file mode 100644 index 00000000..6292ca61 --- /dev/null +++ b/tests/modules/malt/run/main.nf @@ -0,0 +1,21 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNZIP } from '../../../../modules/unzip/main.nf' addParams( options: [:] ) +include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' addParams( options: [:] ) +include { MALT_RUN } from '../../../../modules/malt/run/main.nf' addParams( options: [:] ) + +workflow test_malt_run { + + fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + gff = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) + seq_type = "DNA" + map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) + input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + mode = "BlastN" + + UNZIP ( map_db ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive ) + MALT_RUN ( input, mode, MALT_BUILD.out.index ) +} diff --git a/tests/modules/malt/run/test.yml b/tests/modules/malt/run/test.yml new file mode 100644 index 00000000..cc6f5fac --- /dev/null +++ b/tests/modules/malt/run/test.yml @@ -0,0 +1,11 @@ +## TODO nf-core: Please run the following command to build this file: +# nf-core modules create-test-yml malt/run +- name: malt run + command: nextflow run ./tests/modules/malt/run -entry test_malt_run -c tests/config/nextflow.config + tags: + - malt + - malt/run + files: + - path: output/malt/test_1.rma6 + - path: output/malt/malt-run.log + From 5de3f2c50e063bda30316342b6606b485a12e38c Mon Sep 17 00:00:00 2001 From: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> Date: Wed, 4 Aug 2021 11:16:51 +0200 Subject: [PATCH 041/314] Added PL tag in read group information for downstream 
analysis (#649) Co-authored-by: Maxime U. Garcia --- modules/star/align/main.nf | 11 ++++++----- tests/modules/star/align/main.nf | 10 +++------- tests/modules/star/align/test.yml | 15 +++++++-------- 3 files changed, 16 insertions(+), 20 deletions(-) diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index c06daf24..d5d88ce8 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -39,11 +39,12 @@ process STAR_ALIGN { tuple val(meta), path('*.out.junction') , optional:true, emit: junction script: - def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def ignore_gtf = params.star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" - def seq_center = params.seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$params.seq_center' 'SM:$prefix'" : "--outSAMattrRGline ID:$prefix 'SM:$prefix'" - def out_sam_type = (options.args.contains('--outSAMtype')) ? '' : '--outSAMtype BAM Unsorted' + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def ignore_gtf = params.star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" + def seq_platform = params.seq_platform ? "'PL:$params.seq_platform'" : "" + def seq_center = params.seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$params.seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " + def out_sam_type = (options.args.contains('--outSAMtype')) ? '' : '--outSAMtype BAM Unsorted' def mv_unsorted_bam = (options.args.contains('--outSAMtype BAM Unsorted SortedByCoordinate')) ? "mv ${prefix}.Aligned.out.bam ${prefix}.Aligned.unsort.out.bam" : '' """ STAR \\ diff --git a/tests/modules/star/align/main.nf b/tests/modules/star/align/main.nf index ff278efd..d7a7ef96 100644 --- a/tests/modules/star/align/main.nf +++ b/tests/modules/star/align/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9'] ) -include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'] ) -include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9']) +include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'], seq_platform: 'illumina') +include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 
--chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'], seq_platform: 'illumina') include { STAR_ALIGN as STAR_FOR_STARFUSION } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outReadsUnmapped None --twopassMode Basic --outSAMstrandField intronMotif --outSAMunmapped Within --chimSegmentMin 12 --chimJunctionOverhangMin 8 --chimOutJunctionFormat 1 --alignSJDBoverhangMin 10 --alignMatesGapMax 100000 --alignIntronMax 100000 --alignSJstitchMismatchNmax 5 -1 5 5 --chimMultimapScoreRange 3 --chimScoreJunctionNonGTAG -4 --chimMultimapNmax 20 --chimNonchimScoreDropMin 10 --peOverlapNbasesMin 12 --peOverlapMMp 0.1 --alignInsertionFlush Right --alignSplicedMateMapLminOverLmate 0 --alignSplicedMateMapLmin 30'] ) workflow test_star_alignment_single_end { @@ -13,7 +13,6 @@ workflow test_star_alignment_single_end { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - STAR_GENOMEGENERATE ( fasta, gtf ) STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) } @@ -25,7 +24,6 @@ workflow test_star_alignment_paired_end { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - STAR_GENOMEGENERATE ( fasta, gtf ) STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) } @@ -38,7 +36,6 @@ workflow test_star_alignment_paired_end_for_fusion { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - STAR_GENOMEGENERATE ( fasta, gtf ) STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf ) } @@ -50,7 +47,6 @@ workflow test_star_alignment_paired_end_for_starfusion { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - STAR_GENOMEGENERATE ( fasta, gtf ) STAR_FOR_STARFUSION ( input, STAR_GENOMEGENERATE.out.index, gtf ) } diff --git a/tests/modules/star/align/test.yml b/tests/modules/star/align/test.yml index 1d3b548d..79ab38f1 100644 --- a/tests/modules/star/align/test.yml +++ b/tests/modules/star/align/test.yml @@ -1,8 +1,8 @@ - name: star align test_star_alignment_single_end command: nextflow run tests/modules/star/align -entry test_star_alignment_single_end -c tests/config/nextflow.config tags: - - star - star/align + - star files: - path: output/index/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da @@ -36,7 +36,7 @@ - path: output/index/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: 509d7f1fba3350913c8ea13f01917085 + md5sum: b9f5e2f6a624b64c300fe25dc3ac801f - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out @@ -45,8 +45,8 @@ - name: star align test_star_alignment_paired_end command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end -c tests/config/nextflow.config tags: - - star - star/align + - star files: - path: output/index/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da @@ -80,8 +80,7 @@ - path: output/index/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - 
md5sum: 64b408fb1d61e2de8ff51c847cd5bc52 - - path: output/star/test.Log.final.out + md5sum: 38d08f0b944a2a1b981a250d675aa0d9 - path: output/star/test.Log.out - path: output/star/test.Log.progress.out - path: output/star/test.SJ.out.tab @@ -89,8 +88,8 @@ - name: star align test_star_alignment_paired_end_for_fusion command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_fusion -c tests/config/nextflow.config tags: - - star - star/align + - star files: - path: output/index/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da @@ -124,7 +123,7 @@ - path: output/index/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: d724ca90a102347b9c5052a33ea4d308 + md5sum: c740d5177067c1fcc48ab7a16cd639d7 - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out @@ -133,8 +132,8 @@ - name: star align test_star_alignment_paired_end_for_starfusion command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_starfusion -c tests/config/nextflow.config tags: - - star - star/align + - star files: - path: output/index/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da From 28b023e6f4d0d2745406d9dc6e38006882804e67 Mon Sep 17 00:00:00 2001 From: Christian Mertes Date: Thu, 5 Aug 2021 19:59:56 +0200 Subject: [PATCH 042/314] Require r-base>=3.5 for RSeqC Conda envs (#654) * require conda-forge::r-base>=3.5 for conda-envs * have same conda requirements for all rseqc submodules --- modules/rseqc/bamstat/main.nf | 2 +- modules/rseqc/inferexperiment/main.nf | 2 +- modules/rseqc/innerdistance/main.nf | 2 +- modules/rseqc/junctionannotation/main.nf | 2 +- modules/rseqc/junctionsaturation/main.nf | 2 +- modules/rseqc/readdistribution/main.nf | 2 +- modules/rseqc/readduplication/main.nf | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/modules/rseqc/bamstat/main.nf b/modules/rseqc/bamstat/main.nf index ac80b6d0..913c3f53 100644 --- a/modules/rseqc/bamstat/main.nf +++ b/modules/rseqc/bamstat/main.nf @@ -11,7 +11,7 @@ process RSEQC_BAMSTAT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::rseqc=3.0.1" : null) + conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" } else { diff --git a/modules/rseqc/inferexperiment/main.nf b/modules/rseqc/inferexperiment/main.nf index da0958d4..a887e6e6 100644 --- a/modules/rseqc/inferexperiment/main.nf +++ b/modules/rseqc/inferexperiment/main.nf @@ -11,7 +11,7 @@ process RSEQC_INFEREXPERIMENT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::rseqc=3.0.1" : null) + conda (params.enable_conda ? 
"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" } else { diff --git a/modules/rseqc/innerdistance/main.nf b/modules/rseqc/innerdistance/main.nf index 2688fca6..e2e8f909 100644 --- a/modules/rseqc/innerdistance/main.nf +++ b/modules/rseqc/innerdistance/main.nf @@ -11,7 +11,7 @@ process RSEQC_INNERDISTANCE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::rseqc=3.0.1" : null) + conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" } else { diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index ace4fe61..30bdcd11 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -11,7 +11,7 @@ process RSEQC_JUNCTIONANNOTATION { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::rseqc=3.0.1" : null) + conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" } else { diff --git a/modules/rseqc/junctionsaturation/main.nf b/modules/rseqc/junctionsaturation/main.nf index e6e21638..837006d0 100644 --- a/modules/rseqc/junctionsaturation/main.nf +++ b/modules/rseqc/junctionsaturation/main.nf @@ -11,7 +11,7 @@ process RSEQC_JUNCTIONSATURATION { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::rseqc=3.0.1" : null) + conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" } else { diff --git a/modules/rseqc/readdistribution/main.nf b/modules/rseqc/readdistribution/main.nf index a6ed6c9f..1b09908e 100644 --- a/modules/rseqc/readdistribution/main.nf +++ b/modules/rseqc/readdistribution/main.nf @@ -11,7 +11,7 @@ process RSEQC_READDISTRIBUTION { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::rseqc=3.0.1" : null) + conda (params.enable_conda ? 
"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" } else { diff --git a/modules/rseqc/readduplication/main.nf b/modules/rseqc/readduplication/main.nf index 6fb06f63..c86b05b6 100644 --- a/modules/rseqc/readduplication/main.nf +++ b/modules/rseqc/readduplication/main.nf @@ -11,7 +11,7 @@ process RSEQC_READDUPLICATION { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::rseqc=3.0.1" : null) + conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" } else { From 67cc3bd116a4dc56af79a8b84511f18b96fcad5c Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Tue, 10 Aug 2021 15:10:27 +0200 Subject: [PATCH 043/314] Add bcftools/concat module. (#641) * draft for bcftools modules [ci skip] * initial test for bcftools concat * Update the params for testing * fix tests * Accomodate code review [ci skip] Co-authored-by: James A. Fellows Yates * Update the meta file and open PR for review * Update the keyword * Update the tags for module [ci skip[ * add threads Co-authored-by: James A. Fellows Yates --- modules/bcftools/concat/functions.nf | 68 ++++++++++++++++++++++++++ modules/bcftools/concat/main.nf | 40 +++++++++++++++ modules/bcftools/concat/meta.yml | 42 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bcftools/concat/main.nf | 16 ++++++ tests/modules/bcftools/concat/test.yml | 8 +++ 6 files changed, 178 insertions(+) create mode 100644 modules/bcftools/concat/functions.nf create mode 100644 modules/bcftools/concat/main.nf create mode 100644 modules/bcftools/concat/meta.yml create mode 100644 tests/modules/bcftools/concat/main.nf create mode 100644 tests/modules/bcftools/concat/test.yml diff --git a/modules/bcftools/concat/functions.nf b/modules/bcftools/concat/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bcftools/concat/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def 
saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bcftools/concat/main.nf b/modules/bcftools/concat/main.nf new file mode 100644 index 00000000..0266f4f0 --- /dev/null +++ b/modules/bcftools/concat/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BCFTOOLS_CONCAT { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bcftools=1.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" + } else { + container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" + } + + input: + tuple val(meta), path(vcfs) + + output: + tuple val(meta), path("*.gz"), emit: vcf + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + bcftools concat \\ + --output ${prefix}.vcf.gz \\ + $options.args \\ + --threads $task.cpus \\ + ${vcfs} + + echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + """ +} diff --git a/modules/bcftools/concat/meta.yml b/modules/bcftools/concat/meta.yml new file mode 100644 index 00000000..566e6dba --- /dev/null +++ b/modules/bcftools/concat/meta.yml @@ -0,0 +1,42 @@ +name: bcftools_concat +description: Concatenate VCF files +keywords: + - variant calling + - concat + - bcftools + - VCF + +tools: + - concat: + description: | + Concatenate VCF files. + homepage: http://samtools.github.io/bcftools/bcftools.html + documentation: http://www.htslib.org/doc/bcftools.html + doi: 10.1093/bioinformatics/btp352 +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcfs: + type: files + description: | + List containing 2 or more vcf files + e.g. [ 'file1.vcf', 'file2.vcf' ] +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - vcf: + type: file + description: VCF concatenated output file + pattern: "*.{vcf.gz}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 39b267c7..dc9933d9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -30,6 +30,10 @@ bbmap/bbduk: - modules/bbmap/bbduk/** - tests/modules/bbmap/bbduk/** +bcftools/concat: + - modules/bcftools/concat/** + - tests/modules/bcftools/concat/** + bcftools/consensus: - modules/bcftools/consensus/** - tests/modules/bcftools/consensus/** diff --git a/tests/modules/bcftools/concat/main.nf b/tests/modules/bcftools/concat/main.nf new file mode 100644 index 00000000..8869a3d7 --- /dev/null +++ b/tests/modules/bcftools/concat/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BCFTOOLS_CONCAT } from '../../../../modules/bcftools/concat/main.nf' addParams( options: ['args': '--no-version'] ) + +workflow test_bcftools_concat { + + input = [ [ id:'test3' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_vcf_gz'], checkIfExists: true) ] + ] + + + BCFTOOLS_CONCAT ( input ) +} diff --git a/tests/modules/bcftools/concat/test.yml b/tests/modules/bcftools/concat/test.yml new file mode 100644 index 00000000..413fe798 --- /dev/null +++ b/tests/modules/bcftools/concat/test.yml @@ -0,0 +1,8 @@ +- name: bcftools concat test_bcftools_concat + command: nextflow run tests/modules/bcftools/concat -entry test_bcftools_concat -c tests/config/nextflow.config + tags: + - bcftools/concat + - bcftools + files: + - path: output/bcftools/test3.vcf.gz + md5sum: c400c7458524d889e0967b06ed72534f From 653e9e05b1c48bf0e72ded63940586f7bc8b714f Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Mon, 16 Aug 2021 03:51:30 -0600 Subject: [PATCH 044/314] add module for dragonflye (#633) * add module for dragonflye * fix tests for dragonflye * Update test.yml * Update meta.yml * Update main.nf * Update main.nf * Update modules/dragonflye/meta.yml Co-authored-by: Gregor Sturm --- modules/dragonflye/functions.nf | 68 +++++++++++++++++++++++++++++++ modules/dragonflye/main.nf | 45 ++++++++++++++++++++ modules/dragonflye/meta.yml | 57 ++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/dragonflye/main.nf | 22 ++++++++++ tests/modules/dragonflye/test.yml | 25 ++++++++++++ 6 files changed, 221 insertions(+) create mode 100644 modules/dragonflye/functions.nf create mode 100644 modules/dragonflye/main.nf create mode 100644 modules/dragonflye/meta.yml create mode 100644 tests/modules/dragonflye/main.nf create mode 100644 tests/modules/dragonflye/test.yml diff --git a/modules/dragonflye/functions.nf b/modules/dragonflye/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/dragonflye/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/dragonflye/main.nf b/modules/dragonflye/main.nf new file mode 100644 index 00000000..cd0195e9 --- /dev/null +++ b/modules/dragonflye/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DRAGONFLYE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::dragonflye=1.0.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/dragonflye:1.0.4--hdfd78af_0" + } else { + container "quay.io/biocontainers/dragonflye:1.0.4--hdfd78af_0" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("contigs.fa") , emit: contigs + tuple val(meta), path("dragonflye.log") , emit: log + tuple val(meta), path("{flye,miniasm,raven}.fasta") , emit: raw_contigs + tuple val(meta), path("{miniasm,raven}-unpolished.gfa"), optional:true , emit: gfa + tuple val(meta), path("flye-info.txt"), optional:true , emit: txt + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def memory = task.memory.toGiga() + """ + dragonflye \\ + --reads ${reads} \\ + $options.args \\ + --cpus $task.cpus \\ + --ram $memory \\ + --outdir ./ \\ + --force + echo \$(dragonflye --version 2>&1) | sed 's/^.*dragonflye //' > ${software}.version.txt + """ +} diff --git a/modules/dragonflye/meta.yml b/modules/dragonflye/meta.yml new file mode 100644 index 00000000..a2bf2703 --- /dev/null +++ b/modules/dragonflye/meta.yml @@ -0,0 +1,57 @@ +name: dragonflye +description: Assemble bacterial isolate genomes from Nanopore reads +keywords: + - bacterial + - assembly + - nanopore + +tools: + - dragonflye: + description: Microbial assembly pipeline for Nanopore reads + homepage: https://github.com/rpetit3/dragonflye + documentation: https://github.com/rpetit3/dragonflye/blob/main/README.md + licence: ['GPL v2'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: Input Nanopore FASTQ file + pattern: "*.fastq.gz" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - contigs: + type: file + description: The final assembly produced by Dragonflye + pattern: "contigs.fa" + - log: + type: file + description: Full log file for bug reporting + pattern: "dragonflye.log" + - raw_contigs: + type: file + description: Raw assembly produced by the assembler (Flye, Miniasm, or Raven) + pattern: "{flye,miniasm,raven}.fasta" + - txt: + type: file + description: Assembly information output by Flye + pattern: "flye-info.txt" + - gfa: + type: file + description: Assembly graph produced by Miniasm, or Raven + pattern: "{miniasm,raven}-unpolished.gfa" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index dc9933d9..a4a1a460 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -242,6 +242,10 @@ delly/call: - modules/delly/call/** - tests/modules/delly/call/** +dragonflye: + - modules/dragonflye/** + - tests/modules/dragonflye/** + dshbio/filterbed: - modules/dshbio/filterbed/** - tests/modules/dshbio/filterbed/** diff --git a/tests/modules/dragonflye/main.nf b/tests/modules/dragonflye/main.nf new file mode 100644 index 00000000..4d3ac6e5 --- /dev/null +++ b/tests/modules/dragonflye/main.nf @@ -0,0 +1,22 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DRAGONFLYE } from '../../../modules/dragonflye/main.nf' addParams( options: [args: '--assembler miniasm --gsize 5000000'] ) +include { DRAGONFLYE as DRAGONFLYE_RAVEN } from '../../../modules/dragonflye/main.nf' addParams( options: [args: '--assembler raven --gsize 5000000'] ) + +workflow test_dragonflye { + input = [ [ id:'test', single_end:true ], // meta map + [ file("https://github.com/nf-core/test-datasets/raw/bacass/nanopore/subset15000.fq.gz", checkIfExists: true) ] + ] + + DRAGONFLYE ( input ) +} + +workflow test_dragonflye_raven { + input = [ [ id:'test', single_end:true ], // meta map + [ file("https://github.com/nf-core/test-datasets/raw/bacass/nanopore/subset15000.fq.gz", checkIfExists: true) ] + ] + + DRAGONFLYE_RAVEN ( input ) +} diff --git a/tests/modules/dragonflye/test.yml b/tests/modules/dragonflye/test.yml new file mode 100644 index 00000000..fe6283c0 --- /dev/null +++ b/tests/modules/dragonflye/test.yml @@ -0,0 +1,25 @@ +- name: dragonflye with miniasm + command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye -c tests/config/nextflow.config + tags: + - dragonflye + files: + - path: output/dragonflye/miniasm.fasta + md5sum: 6b8903ba09592df99f43ed05fda488f6 + - path: output/dragonflye/miniasm-unpolished.gfa + md5sum: 40ab03a417eafab0cb4ac2c32bd006e1 + # MD5sum not reproducible (timestamp, contig order) + - path: output/dragonflye/contigs.fa + - path: output/dragonflye/dragonflye.log + +- name: dragonflye with raven + command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye_raven -c tests/config/nextflow.config + tags: + - dragonflye + files: + - path: output/dragonflye/raven.fasta + md5sum: bd4ba5b0dda110a7ccbea9581c97a898 + - path: output/dragonflye/raven-unpolished.gfa + md5sum: 62c21791dbf9b2c7375dc52d7bab5be2 + # MD5sum not reproducible (timestamp, contig order) + - path: output/dragonflye/contigs.fa + - path: output/dragonflye/dragonflye.log From 6e68c1af9a514bb056c0513ebba6764efd6750fc Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 16 Aug 2021 05:57:22 -0400 Subject: [PATCH 045/314] update typos. change quote from ' to ". 
(#652) --- modules/cooler/digest/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/cooler/digest/main.nf b/modules/cooler/digest/main.nf index 399541d9..bb4081d9 100644 --- a/modules/cooler/digest/main.nf +++ b/modules/cooler/digest/main.nf @@ -5,7 +5,7 @@ params.options = [:] options = initOptions(params.options) process COOLER_DIGEST { - tag '$fasta' + tag "$fasta" label 'process_medium' publishDir "${params.outdir}", mode: params.publish_dir_mode, @@ -32,7 +32,7 @@ process COOLER_DIGEST { """ cooler digest \\ $options.args \\ - -o "${fasta.baseName}_${enzyme.replaceAll(/[^0-9a-zA-Z]+/, "_")}.bed" \\ + -o "${fasta.baseName}_${enzyme.replaceAll(/[^0-9a-zA-Z]+/, '_')}.bed" \\ $chromsizes \\ $fasta \\ $enzyme From 0954204f9e2aa22056c7d9c58f2993fe5c496db8 Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Mon, 16 Aug 2021 17:40:03 +0200 Subject: [PATCH 046/314] Add bcftools/norm module (#655) * Initial draft [ci skip] * trigger first test * update output file path * Tests passing * finishing touches for meta.yml and update checksum * tweak checksum * add threads to the module * skip version info for matching test md5sum [ci skip] * Add ref fasta and finalize the module Co-authored-by: Gregor Sturm --- modules/bcftools/norm/functions.nf | 68 ++++++++++++++++++++++++++++ modules/bcftools/norm/main.nf | 42 +++++++++++++++++ modules/bcftools/norm/meta.yml | 45 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bcftools/norm/main.nf | 15 ++++++ tests/modules/bcftools/norm/test.yml | 8 ++++ 6 files changed, 182 insertions(+) create mode 100644 modules/bcftools/norm/functions.nf create mode 100644 modules/bcftools/norm/main.nf create mode 100644 modules/bcftools/norm/meta.yml create mode 100644 tests/modules/bcftools/norm/main.nf create mode 100644 tests/modules/bcftools/norm/test.yml diff --git a/modules/bcftools/norm/functions.nf b/modules/bcftools/norm/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bcftools/norm/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf new file mode 100644 index 00000000..5d8a7c3c --- /dev/null +++ b/modules/bcftools/norm/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BCFTOOLS_NORM { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" + } else { + container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" + } + + input: + tuple val(meta), path(vcf) + path(fasta) + + output: + tuple val(meta), path("*.gz") , emit: vcf + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + bcftools norm \\ + --fasta-ref ${fasta} \\ + --output ${prefix}.vcf.gz \\ + $options.args \\ + --threads $task.cpus \\ + ${vcf} + + echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + """ +} diff --git a/modules/bcftools/norm/meta.yml b/modules/bcftools/norm/meta.yml new file mode 100644 index 00000000..abeb8904 --- /dev/null +++ b/modules/bcftools/norm/meta.yml @@ -0,0 +1,45 @@ +name: bcftools_norm +description: Normalize VCF file +keywords: + - normalize + - norm + - variant calling + - VCF +tools: + - norm: + description: | + Normalize VCF files. + homepage: http://samtools.github.io/bcftools/bcftools.html + documentation: http://www.htslib.org/doc/bcftools.html + doi: 10.1093/bioinformatics/btp352 +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: | + The vcf file to be normalized + e.g. 'file1.vcf' + - fasta: + type: file + description: FASTA reference file + pattern: "*.{fasta,fa}" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - vcf: + type: file + description: VCF normalized output file + pattern: "*.{vcf.gz}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a4a1a460..0482c672 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -54,6 +54,10 @@ bcftools/mpileup: - modules/bcftools/mpileup/** - tests/modules/bcftools/mpileup/** +bcftools/norm: + - modules/bcftools/norm/** + - tests/modules/bcftools/norm/** + bcftools/reheader: - modules/bcftools/reheader/** - tests/modules/bcftools/reheader/** diff --git a/tests/modules/bcftools/norm/main.nf b/tests/modules/bcftools/norm/main.nf new file mode 100644 index 00000000..046c0b3c --- /dev/null +++ b/tests/modules/bcftools/norm/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BCFTOOLS_NORM } from '../../../../modules/bcftools/norm/main.nf' addParams( options: ['args': '-m -any --no-version'] ) + +workflow test_bcftools_norm { + + input = [ [ id:'test2', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true)] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BCFTOOLS_NORM ( input, fasta ) +} diff --git a/tests/modules/bcftools/norm/test.yml b/tests/modules/bcftools/norm/test.yml new file mode 100644 index 00000000..40d0cc7e --- /dev/null +++ b/tests/modules/bcftools/norm/test.yml @@ -0,0 +1,8 @@ +- name: bcftools norm + command: nextflow run ./tests/modules/bcftools/norm -entry test_bcftools_norm -c tests/config/nextflow.config + tags: + - bcftools + - bcftools/norm + files: + - path: output/bcftools/test2.vcf.gz + md5sum: 2b1cac07d1875b8adcd7a85346890f07 From b261c1f549db29574c880f65a15e70ada4864eb2 Mon Sep 17 00:00:00 2001 From: Anders Jemt Date: Mon, 16 Aug 2021 17:47:42 +0200 Subject: [PATCH 047/314] Expansionhunter (#666) Please enter the commit message for your changes. Lines starting * adds expansionhunter module Co-authored-by: Maxime U. 
Garcia --- modules/expansionhunter/functions.nf | 68 ++++++++++++++++++++++++++ modules/expansionhunter/main.nf | 45 +++++++++++++++++ modules/expansionhunter/meta.yml | 50 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 1 + tests/modules/expansionhunter/main.nf | 17 +++++++ tests/modules/expansionhunter/test.yml | 7 +++ 7 files changed, 192 insertions(+) create mode 100644 modules/expansionhunter/functions.nf create mode 100644 modules/expansionhunter/main.nf create mode 100644 modules/expansionhunter/meta.yml create mode 100644 tests/modules/expansionhunter/main.nf create mode 100644 tests/modules/expansionhunter/test.yml diff --git a/modules/expansionhunter/functions.nf b/modules/expansionhunter/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/expansionhunter/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf new file mode 100644 index 00000000..41c6ed6c --- /dev/null +++ b/modules/expansionhunter/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process EXPANSIONHUNTER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::expansionhunter=4.0.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/expansionhunter:4.0.2--he785bd8_0" + } else { + container "quay.io/biocontainers/expansionhunter:4.0.2--he785bd8_0" + } + + input: + tuple val(meta), path(bam), path(bai) + path fasta + path variant_catalog + + output: + tuple val(meta), path("*.vcf"), emit: vcf + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def gender = (meta.gender == 'male' || meta.gender == 1 || meta.gender == 'XY') ? "male" : "female" + """ + ExpansionHunter \\ + $options.args \\ + --reads $bam \\ + --output-prefix $prefix \\ + --reference $fasta \\ + --variant-catalog $variant_catalog \\ + --sex $gender + + echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter //' > ${software}.version.txt + """ +} diff --git a/modules/expansionhunter/meta.yml b/modules/expansionhunter/meta.yml new file mode 100644 index 00000000..cac3ed2b --- /dev/null +++ b/modules/expansionhunter/meta.yml @@ -0,0 +1,50 @@ +name: expansionhunter +description: Estimate repeat sizes at short tandem repeat (STR) loci from BAM/CRAM alignments +keywords: + - STR + - repeat_expansions +tools: + - expansionhunter: + description: A tool for estimating repeat sizes + homepage: https://github.com/Illumina/ExpansionHunter + documentation: https://github.com/Illumina/ExpansionHunter/blob/master/docs/01_Introduction.md + tool_dev_url: None + doi: "10.1093/bioinformatics/btz431" + licence: ['Apache v2.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM file + pattern: "*.{bam,cram}" + - fasta: + type: file + description: Reference genome + pattern: "*.{fa,fasta}" + - variant_catalog: + type: file + description: json file with repeat expansion sites to genotype + pattern: "*.{json}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', gender:'female' ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - vcf: + type: file + description: VCF with repeat expansions + pattern: "*.{vcf}" + +authors: + - "@jemten" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 0482c672..a65879cb 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -270,6 +270,10 @@ ensemblvep: - modules/ensemblvep/** - tests/modules/ensemblvep/** +expansionhunter: + - modules/expansionhunter/** + - tests/modules/expansionhunter/** + fastp: - modules/fastp/** - tests/modules/fastp/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 5643c364..73232088 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -107,6 +107,7 @@ params { mills_and_1000g_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz" mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi" index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" + repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" } 'illumina' { test_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam" diff --git a/tests/modules/expansionhunter/main.nf b/tests/modules/expansionhunter/main.nf new file mode 100644 index 00000000..a7acbff4 --- /dev/null +++ b/tests/modules/expansionhunter/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' addParams( options: [:] ) + +workflow test_expansionhunter { + + input = [ [ id:'test', gender:'male' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true), + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + variant_catalog = file(params.test_data['homo_sapiens']['genome']['repeat_expansions'], checkIfExists: true) + + EXPANSIONHUNTER ( input, fasta, variant_catalog ) +} diff --git a/tests/modules/expansionhunter/test.yml b/tests/modules/expansionhunter/test.yml new file mode 100644 index 00000000..78d5c002 --- /dev/null +++ b/tests/modules/expansionhunter/test.yml @@ -0,0 +1,7 @@ +- name: expansionhunter test_expansionhunter + command: nextflow run tests/modules/expansionhunter -entry test_expansionhunter -c tests/config/nextflow.config + tags: + - expansionhunter + files: + - path: output/expansionhunter/test.vcf + md5sum: ef6c2101d7bd67211bb5a5a132690e02 From 69b21f0dc0d8330b6c18fb1a89e8b7ebbac4882d Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Tue, 17 Aug 2021 15:08:21 +0200 Subject: [PATCH 048/314] Update test.yml (#668) --- tests/modules/malt/run/test.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/modules/malt/run/test.yml b/tests/modules/malt/run/test.yml index cc6f5fac..0c245f2f 100644 --- a/tests/modules/malt/run/test.yml +++ b/tests/modules/malt/run/test.yml @@ -1,5 +1,3 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml malt/run - name: malt run command: nextflow run ./tests/modules/malt/run -entry test_malt_run -c tests/config/nextflow.config tags: From 6c633ef305b1839be98bde6ad3956758431206e1 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 17 Aug 2021 15:43:25 +0200 Subject: [PATCH 049/314] Specify in guidelines one should split CPUs when module has n > 1 tool (#660) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Describe CPU splitting * Update README.md Co-authored-by: Gregor Sturm * More CPU examples Co-authored-by: Gregor Sturm --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 02638474..f82e45f8 100644 --- a/README.md +++ b/README.md @@ -457,6 +457,10 @@ using a combination of `bwa` and `samtools` to output a BAM file instead of a SA - If the tool supports multi-threading then you MUST provide the appropriate parameter using the Nextflow `task` variable e.g. `--threads $task.cpus`. +- If a module contains _multiple_ tools that supports multi-threading (e.g. [piping output into a samtools command](https://github.com/nf-core/modules/blob/28b023e6f4d0d2745406d9dc6e38006882804e67/modules/bowtie2/align/main.nf#L32-L46)), you MUST assign cpus per tool such that the total number of used CPUs does not exceed `task.cpus`. + - For example, combining two (or more) tools that both (all) have multi-threading, this can be assigned to the variable [`split_cpus`](https://github.com/nf-core/modules/blob/28b023e6f4d0d2745406d9dc6e38006882804e67/modules/bowtie2/align/main.nf#L32) + - If one tool is multi-threaded and another uses a single thread, you can specify directly in the command itself e.g. with [`${task.cpus - 1}`](https://github.com/nf-core/modules/blob/6e68c1af9a514bb056c0513ebba6764efd6750fc/modules/bwa/sampe/main.nf#L42-L43) + #### Software requirements [BioContainers](https://biocontainers.pro/#/) is a registry of Docker and Singularity containers automatically created from all of the software packages on [Bioconda](https://bioconda.github.io/). Where possible we will use BioContainers to fetch pre-built software containers and Bioconda to install software using Conda. 
From bc7b5b3a125c14f8299720531641b25d3afaa4d5 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Tue, 17 Aug 2021 08:53:41 -0500 Subject: [PATCH 050/314] Add dsh-bio export-segments module (#631) Co-authored-by: Gregor Sturm --- modules/dshbio/exportsegments/functions.nf | 68 ++++++++++++++++++++ modules/dshbio/exportsegments/main.nf | 40 ++++++++++++ modules/dshbio/exportsegments/meta.yml | 40 ++++++++++++ tests/config/pytest_modules.yml | 6 +- tests/modules/dshbio/exportsegments/main.nf | 13 ++++ tests/modules/dshbio/exportsegments/test.yml | 8 +++ 6 files changed, 174 insertions(+), 1 deletion(-) create mode 100644 modules/dshbio/exportsegments/functions.nf create mode 100644 modules/dshbio/exportsegments/main.nf create mode 100644 modules/dshbio/exportsegments/meta.yml create mode 100644 tests/modules/dshbio/exportsegments/main.nf create mode 100644 tests/modules/dshbio/exportsegments/test.yml diff --git a/modules/dshbio/exportsegments/functions.nf b/modules/dshbio/exportsegments/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/dshbio/exportsegments/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf new file mode 100644 index 00000000..bf4c9699 --- /dev/null +++ b/modules/dshbio/exportsegments/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DSHBIO_EXPORTSEGMENTS { + tag "${meta.id}" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + } else { + container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + } + + input: + tuple val(meta), path(gfa) + + output: + tuple val(meta), path("*.fa"), emit: fasta + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + dsh-bio \\ + export-segments \\ + $options.args \\ + -i $gfa \\ + -o ${prefix}.fa + + echo \$(dsh-bio --version 2>&1) | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ' > ${software}.version.txt + """ +} diff --git a/modules/dshbio/exportsegments/meta.yml b/modules/dshbio/exportsegments/meta.yml new file mode 100644 index 00000000..c064527e --- /dev/null +++ b/modules/dshbio/exportsegments/meta.yml @@ -0,0 +1,40 @@ +name: dshbio_exportsegments +description: Export assembly segment sequences in GFA 1.0 format to FASTA format +keywords: + - gfa + - assembly + - segment +tools: + - dshbio: + description: | + Reads, features, variants, assemblies, alignments, genomic range trees, pangenome + graphs, and a bunch of random command line tools for bioinformatics. LGPL version 3 + or later. + homepage: https://github.com/heuermh/dishevelled-bio + documentation: https://github.com/heuermh/dishevelled-bio +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - gfa: + type: file + description: Assembly segments in GFA 1.0 format + pattern: "*.{gfa}" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - fasta: + type: file + description: Assembly segment sequences in FASTA format + pattern: "*.{fa}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@heuermh" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a65879cb..08a11a4c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -245,11 +245,15 @@ deeptools/plotprofile: delly/call: - modules/delly/call/** - tests/modules/delly/call/** - + dragonflye: - modules/dragonflye/** - tests/modules/dragonflye/** +dshbio/exportsegments: + - modules/dshbio/exportsegments/** + - tests/modules/dshbio/exportsegments/** + dshbio/filterbed: - modules/dshbio/filterbed/** - tests/modules/dshbio/filterbed/** diff --git a/tests/modules/dshbio/exportsegments/main.nf b/tests/modules/dshbio/exportsegments/main.nf new file mode 100644 index 00000000..6eef1046 --- /dev/null +++ b/tests/modules/dshbio/exportsegments/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DSHBIO_EXPORTSEGMENTS } from '../../../../modules/dshbio/exportsegments/main.nf' addParams( options: [:] ) + +workflow test_dshbio_exportsegments { + input = [ [ id:'test' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['assembly_gfa'], checkIfExists: true) ] + ] + + DSHBIO_EXPORTSEGMENTS ( input ) +} diff --git a/tests/modules/dshbio/exportsegments/test.yml b/tests/modules/dshbio/exportsegments/test.yml new file mode 100644 index 00000000..453e1cba --- /dev/null +++ b/tests/modules/dshbio/exportsegments/test.yml @@ -0,0 +1,8 @@ +- name: dshbio exportsegments + command: nextflow run ./tests/modules/dshbio/exportsegments -entry test_dshbio_exportsegments -c tests/config/nextflow.config + tags: + - dshbio + - dshbio/exportsegments + files: + - path: ./output/dshbio/test.fa + md5sum: 19ed0b69970ed3fbb641c5c510ebef61 From ab93a1afa7941dcf4fff8dc35712290077f25413 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 17 Aug 2021 15:55:54 +0200 Subject: [PATCH 051/314] update: `BWA/ALN` (#653) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Remove reads from output channel following module guidelines. Should do a .join() based on $meta, to reassociate. Co-authored-by: Gregor Sturm --- modules/bwa/aln/main.nf | 2 +- modules/bwa/aln/meta.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index d9c2ba13..59f1396c 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -23,7 +23,7 @@ process BWA_ALN { path index output: - tuple val(meta), path(reads), path("*.sai"), emit: sai + tuple val(meta), path("*.sai"), emit: sai path "*.version.txt" , emit: version script: diff --git a/modules/bwa/aln/meta.yml b/modules/bwa/aln/meta.yml index eac1f509..4f81588d 100644 --- a/modules/bwa/aln/meta.yml +++ b/modules/bwa/aln/meta.yml @@ -47,7 +47,7 @@ output: pattern: "*.{version.txt}" - sai: type: file - description: SA coordinate file + description: Single or paired SA coordinate files pattern: "*.sai" authors: From d95be1434f90d945c508b3b9685a694de686c9a0 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Thu, 26 Aug 2021 15:33:38 -0500 Subject: [PATCH 052/314] Update seqwish reported version to match bioconda version. 
(#678) --- modules/seqwish/induce/main.nf | 2 +- modules/seqwish/induce/meta.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index defd86e2..ebf714ff 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -3,7 +3,7 @@ include { initOptions; saveFiles; getSoftwareName } from './functions' params.options = [:] options = initOptions(params.options) -def VERSION = '0.4.1' +def VERSION = '0.7.1' process SEQWISH_INDUCE { tag "$meta.id" diff --git a/modules/seqwish/induce/meta.yml b/modules/seqwish/induce/meta.yml index 0b6b4b5b..f357f0df 100644 --- a/modules/seqwish/induce/meta.yml +++ b/modules/seqwish/induce/meta.yml @@ -36,7 +36,7 @@ output: e.g. [ id:'test', single_end:false ] - gfa: type: file - description: Variation graph in GFA1 format + description: Variation graph in GFA 1.0 format pattern: "*.{gfa}" - version: type: file From 0f59b07945001b0238653500c606a5b1edfc3592 Mon Sep 17 00:00:00 2001 From: Daniel Lundin Date: Fri, 3 Sep 2021 09:28:28 +0200 Subject: [PATCH 053/314] Bbmap index (#683) BBMap index module --- modules/bbmap/index/functions.nf | 68 ++++++++++++++++++++++++++++++ modules/bbmap/index/main.nf | 39 +++++++++++++++++ modules/bbmap/index/meta.yml | 43 +++++++++++++++++++ tests/config/pytest_modules.yml | 6 ++- tests/modules/bbmap/index/main.nf | 13 ++++++ tests/modules/bbmap/index/test.yml | 12 ++++++ 6 files changed, 180 insertions(+), 1 deletion(-) create mode 100644 modules/bbmap/index/functions.nf create mode 100644 modules/bbmap/index/main.nf create mode 100644 modules/bbmap/index/meta.yml create mode 100644 tests/modules/bbmap/index/main.nf create mode 100644 tests/modules/bbmap/index/test.yml diff --git a/modules/bbmap/index/functions.nf b/modules/bbmap/index/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bbmap/index/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bbmap/index/main.nf b/modules/bbmap/index/main.nf new file mode 100644 index 00000000..9e04881c --- /dev/null +++ b/modules/bbmap/index/main.nf @@ -0,0 +1,39 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BBMAP_INDEX { + tag "$fasta" + label 'process_long' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bbmap=38.92" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bbmap:38.92--he522d1c_0" + } else { + container "quay.io/biocontainers/bbmap:38.92--he522d1c_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path('ref') , emit: index + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + """ + bbmap.sh \\ + ref=${fasta} \\ + $options.args \\ + threads=$task.cpus \\ + -Xmx${task.memory.toGiga()}g + + echo \$(bbversion.sh) > ${software}.version.txt + """ +} diff --git a/modules/bbmap/index/meta.yml b/modules/bbmap/index/meta.yml new file mode 100644 index 00000000..f55b1a26 --- /dev/null +++ b/modules/bbmap/index/meta.yml @@ -0,0 +1,43 @@ +name: bbmap_index +description: This module calls bbmap.sh to create an index from a fasta file, ready to be used by bbmap.sh in mapping mode. +keywords: + - mapping + - index + - fasta +tools: + - bbmap: + description: BBMap is a short read aligner, as well as various other bioinformatic tools. + homepage: https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/ + documentation: https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/ + tool_dev_url: None + doi: "" + licence: ['UC-LBL license (see package)'] + +input: + - meta: + type: map + description: | + Groovy Map containing optional parameters to bbmap.sh + e.g. [ id:'test', single_end:false ] + - fasta: + type: fasta + description: fasta formatted file with nucleotide sequences + pattern: "*.{fna,fa,fasta}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - db: + type: directory + description: Directory with index files + pattern: "ref" + +authors: + - "@daniellundin" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 08a11a4c..a4e163c9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -30,6 +30,10 @@ bbmap/bbduk: - modules/bbmap/bbduk/** - tests/modules/bbmap/bbduk/** +bbmap/index: + - modules/bbmap/index/** + - tests/modules/bbmap/index/** + bcftools/concat: - modules/bcftools/concat/** - tests/modules/bcftools/concat/** @@ -245,7 +249,7 @@ deeptools/plotprofile: delly/call: - modules/delly/call/** - tests/modules/delly/call/** - + dragonflye: - modules/dragonflye/** - tests/modules/dragonflye/** diff --git a/tests/modules/bbmap/index/main.nf b/tests/modules/bbmap/index/main.nf new file mode 100644 index 00000000..3dcb63fd --- /dev/null +++ b/tests/modules/bbmap/index/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams( options: [:] ) + +workflow test_bbmap_index { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ] + + BBMAP_INDEX ( input ) +} diff --git a/tests/modules/bbmap/index/test.yml b/tests/modules/bbmap/index/test.yml new file mode 100644 index 00000000..c4a25539 --- /dev/null +++ b/tests/modules/bbmap/index/test.yml @@ -0,0 +1,12 @@ +## TODO nf-core: Please run the following command to build this file: +# nf-core modules create-test-yml bbmap/index +- name: bbmap index + command: nextflow run ./tests/modules/bbmap/index -entry test_bbmap_index -c tests/config/nextflow.config + tags: + - bbmap + - bbmap/index + files: + - path: output/bbmap/ref/genome/1/chr1.chrom.gz + md5sum: fc20702f3378836f06d4104b9cd88918 + - path: output/bbmap/ref/index/1/chr1_index_k13_c15_b1.block + md5sum: 9f0d9a7413c1d2c16cc24555b2381163 From 0732028e152e6ccd3b11a2f71617c746aacf04f8 Mon Sep 17 00:00:00 2001 From: Kevin Brick Date: Tue, 7 Sep 2021 14:36:06 -0400 Subject: [PATCH 054/314] Add bedtools/makewindows module (#658) * Add bedtools/makewindows module --- modules/bedtools/makewindows/functions.nf | 68 +++++++++++++++++++++ modules/bedtools/makewindows/main.nf | 42 +++++++++++++ modules/bedtools/makewindows/meta.yml | 43 +++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bedtools/makewindows/main.nf | 14 +++++ tests/modules/bedtools/makewindows/test.yml | 8 +++ 6 files changed, 179 insertions(+) create mode 100644 modules/bedtools/makewindows/functions.nf create mode 100644 modules/bedtools/makewindows/main.nf create mode 100644 modules/bedtools/makewindows/meta.yml create mode 100644 tests/modules/bedtools/makewindows/main.nf create mode 100644 tests/modules/bedtools/makewindows/test.yml diff --git a/modules/bedtools/makewindows/functions.nf b/modules/bedtools/makewindows/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bedtools/makewindows/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bedtools/makewindows/main.nf b/modules/bedtools/makewindows/main.nf new file mode 100644 index 00000000..d3e82f86 --- /dev/null +++ b/modules/bedtools/makewindows/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BEDTOOLS_MAKEWINDOWS { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--h7d7f7ad_1" + } else { + container "quay.io/biocontainers/bedtools:2.30.0--h7d7f7ad_1" + } + + input: + tuple val(meta), path(regions) + val(use_bed) + + output: + tuple val(meta), path("*.tab"), emit: tab + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def arg_input = use_bed ? 
"-b $regions" : "-g $regions" + """ + bedtools \\ + makewindows \\ + ${arg_input} \\ + $options.args \\ + > ${prefix}.tab + + echo \$(bedtools --version) | sed -e "s/bedtools v//g" > ${software}.version.txt + """ +} diff --git a/modules/bedtools/makewindows/meta.yml b/modules/bedtools/makewindows/meta.yml new file mode 100644 index 00000000..3c1378b8 --- /dev/null +++ b/modules/bedtools/makewindows/meta.yml @@ -0,0 +1,43 @@ +name: bedtools_makewindows + +description: Makes adjacent or sliding windows across a genome or BED file. +keywords: + - bed + - windows +tools: + - bedtools: + description: A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. + homepage: https://bedtools.readthedocs.io + documentation: https://bedtools.readthedocs.io/en/latest/content/tools/makewindows.html + tool_dev_url: None + doi: "10.1093/bioinformatics/btq033" + licence: ['GPL v2'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - regions: + type: file + description: BED file OR Genome details file () + pattern: "*.{bed,fai,tab}" + - use_bed: + type: boolean + description: true = input is a BED file; false = input is a genome details file +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - tab: + type: file + description: Windows TAB file (BED or BED-like format) + pattern: "*.tab" +authors: + - "@kevbrick" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a4e163c9..0c43bd88 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -90,6 +90,10 @@ bedtools/intersect: - modules/bedtools/intersect/** - tests/modules/bedtools/intersect/** +bedtools/makewindows: + - modules/bedtools/makewindows/** + - tests/modules/bedtools/makewindows/** + bedtools/maskfasta: - modules/bedtools/maskfasta/** - tests/modules/bedtools/maskfasta/** diff --git a/tests/modules/bedtools/makewindows/main.nf b/tests/modules/bedtools/makewindows/main.nf new file mode 100644 index 00000000..23c40a75 --- /dev/null +++ b/tests/modules/bedtools/makewindows/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +test_options = ['args': '-w 50 '] +include { BEDTOOLS_MAKEWINDOWS } from '../../../../modules/bedtools/makewindows/main.nf' addParams( options: test_options ) + +workflow test_bedtools_makewindows { + + input = [ [ id:'test'], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)] + + BEDTOOLS_MAKEWINDOWS ( input, true ) +} diff --git a/tests/modules/bedtools/makewindows/test.yml b/tests/modules/bedtools/makewindows/test.yml new file mode 100644 index 00000000..c39d1c08 --- /dev/null +++ b/tests/modules/bedtools/makewindows/test.yml @@ -0,0 +1,8 @@ +- name: bedtools makewindows test_bedtools_makewindows + command: nextflow run tests/modules/bedtools/makewindows -entry test_bedtools_makewindows -c tests/config/nextflow.config + tags: + - bedtools/makewindows + - bedtools + files: + - path: output/bedtools/test.tab + md5sum: 0cf6ed2b6f470cd44a247da74ca4fe4e From 669fb5caed78bccbaf0f00746e2eb50644232722 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Wed, 8 Sep 2021 15:40:34 +0000 Subject: [PATCH 055/314] Homer Modules (#75) * feat(homer): Add initial makeTagDirectory * feat(homer): Add 
initial findPeaks module * feat(homer): Update with new options See 1d30e2c21affedc742680e8e04d60c6481d9cd11 * fix(homer): Correct findpeaks process name * fix(homer): Takes a bam file instead of bed * feat(homer): Add initial makeTagDirectory test * fix(homer): Hardcode genome and configureHomer I'd like to modularize configureHomer, but I need to figure out how exactly the genomes work. * fix(homer): bam => bed Bam requires samtools to be present, which it's not in this docker image * feat(homer): Add initial configureHomer script * ci(homer): Add initial test * test(homer): Reproducible configuration workaround - I can't run both tests(one file and two files) at the same time because it breaks - I can't copy the genome stuff from the configurehomer module because it's read only - So I can't make the makeTagDirectory module depend on configureHomer * test(homer): Add placeholder annotatepeaks The required inputs are necessarily required for all workflows from what I've used, but I'll need to look at the actual docs * test(homer): Add missing B.bed * test(homer): Rename two => groseq Then all of the various workflows that homer provides can be e2e tested * feat(homer): Add initial makeUCSCfile module * test(homer): Add start to makeUCSCfile testing * chore(homer): Add various cleanups * test(homer): Rewrite annotatepeaks Not passing yet * test(homer): Rewrite configurehomer * test(homer): Rewrite findpeaks Still failing * test(homer): Rewrite makeucscfile Not passing yet * test(homer): Rewrite maketagdirectory All homer modules now follow the new structure. Time to make them pass. * test(homer): Fix typo for workflow name * fix(homer): Use correct container * fix(homer): Accept fasta in maketagdirectory Apparently all of the homer stuff can just take any old fasta and you don't need to configure the genome ahead of time with configureHomer * test(homer): makeTagDirectory passes now * fix(homer): Update containers in makeucscfile * test(homer): Rewrite makeucscfile Takes input from maketagdirectory which is how the module should be used * fix(homer): Update makeUCSCFile bedgraph path * test(homer): Update makeucscfile expected output * fix(homer): Update containers in findpeaks * fix(homer): Change findpeaks args The user is just going to have to know what they're doing for now * test(homer): findPeaks rewrite with tagDir input * test(homer): Update expected files for findPeaks And bump filters * style: Appease editorconfig * ci: Remove old workflow * tests(homer): Add md5sums * test(homer): Add meta test * style(homer): Capitalize HOMER * docs(homer): Add maketagdirectory meta.yml * docs(homer): Add makeucscfile meta.yml * docs(homer): Add findpeaks meta.yml * test(homer): Update to new test data standards * chore: Remove stuff that got revived in the rebase * chore: software => modules * test(homer): Update tags * test(homer): Update annotatepeaks * ci: Fix uploading of artifacts GitHub actions doesn't like the / in the tags * test(homer): Remove annotate md5sum This is failing and breaking new tests * test(homer): Use bams instead of beds * test(homer): Fix meta maketagdirectory * test(homer): Fix input in all tests * test(homer): Move back to bed files Forgot samtools isn't present * chore(homer): Add TODOs for tests * test(homer): Add bed format arg * test(homer): Update md5sums * test(homer): Fix tags tsvs * style(homer): Appease nf-core linting * docs(homer): Be in line with what is in the main.nf file Co-authored-by: Kevin Menden Co-authored-by: Kevin Menden --- 
.github/workflows/pytest-workflow.yml | 2 +- modules/homer/annotatepeaks/meta.yml | 4 +- modules/homer/findpeaks/functions.nf | 68 +++++++++++++++++++ modules/homer/findpeaks/main.nf | 42 ++++++++++++ modules/homer/findpeaks/meta.yml | 37 ++++++++++ modules/homer/maketagdirectory/functions.nf | 68 +++++++++++++++++++ modules/homer/maketagdirectory/main.nf | 43 ++++++++++++ modules/homer/maketagdirectory/meta.yml | 41 +++++++++++ modules/homer/makeucscfile/functions.nf | 68 +++++++++++++++++++ modules/homer/makeucscfile/main.nf | 41 +++++++++++ modules/homer/makeucscfile/meta.yml | 38 +++++++++++ tests/config/pytest_modules.yml | 12 ++++ tests/modules/homer/findpeaks/main.nf | 17 +++++ tests/modules/homer/findpeaks/test.yml | 8 +++ tests/modules/homer/maketagdirectory/main.nf | 32 +++++++++ tests/modules/homer/maketagdirectory/test.yml | 33 +++++++++ tests/modules/homer/makeucscfile/main.nf | 17 +++++ tests/modules/homer/makeucscfile/test.yml | 7 ++ 18 files changed, 575 insertions(+), 3 deletions(-) create mode 100644 modules/homer/findpeaks/functions.nf create mode 100644 modules/homer/findpeaks/main.nf create mode 100644 modules/homer/findpeaks/meta.yml create mode 100644 modules/homer/maketagdirectory/functions.nf create mode 100644 modules/homer/maketagdirectory/main.nf create mode 100644 modules/homer/maketagdirectory/meta.yml create mode 100644 modules/homer/makeucscfile/functions.nf create mode 100644 modules/homer/makeucscfile/main.nf create mode 100644 modules/homer/makeucscfile/meta.yml create mode 100644 tests/modules/homer/findpeaks/main.nf create mode 100644 tests/modules/homer/findpeaks/test.yml create mode 100644 tests/modules/homer/maketagdirectory/main.nf create mode 100644 tests/modules/homer/maketagdirectory/test.yml create mode 100644 tests/modules/homer/makeucscfile/main.nf create mode 100644 tests/modules/homer/makeucscfile/test.yml diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 75f0b174..43f48c36 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -95,7 +95,7 @@ jobs: if: failure() uses: actions/upload-artifact@v2 with: - name: logs-${{ matrix.tags }}-${{ matrix.profile }}-${{ matrix.nxf_version }} + name: logs-${{ matrix.profile }}-${{ matrix.nxf_version }} path: | /home/runner/pytest_workflow_*/*/.nextflow.log /home/runner/pytest_workflow_*/*/log.out diff --git a/modules/homer/annotatepeaks/meta.yml b/modules/homer/annotatepeaks/meta.yml index d620e875..a22b9618 100644 --- a/modules/homer/annotatepeaks/meta.yml +++ b/modules/homer/annotatepeaks/meta.yml @@ -1,11 +1,11 @@ name: homer_annotatepeaks -description: Annotate peaks with homer +description: Annotate peaks with HOMER suite keywords: - annotations - peaks - bed tools: - - cuatadapt: + - homer: description: | HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. 
documentation: http://homer.ucsd.edu/homer/ diff --git a/modules/homer/findpeaks/functions.nf b/modules/homer/findpeaks/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/homer/findpeaks/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf new file mode 100644 index 00000000..5fcacc1d --- /dev/null +++ b/modules/homer/findpeaks/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +def options = initOptions(params.options) + +def VERSION = '4.11' + +process HOMER_FINDPEAKS { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::homer=4.11=pl526hc9558a2_3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" + } else { + container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" + } + + input: + tuple val(meta), path(tagDir) + + output: + tuple val(meta), path("*peaks.txt"), emit: txt + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + + findPeaks \\ + $tagDir \\ + $options.args \\ + -o ${prefix}.peaks.txt + + echo $VERSION > ${software}.version.txt + """ +} diff --git a/modules/homer/findpeaks/meta.yml b/modules/homer/findpeaks/meta.yml new file mode 100644 index 00000000..d19199d7 --- /dev/null +++ b/modules/homer/findpeaks/meta.yml @@ -0,0 +1,37 @@ +name: homer_findpeaks +description: Find peaks with HOMER suite +keywords: + - annotations + - peaks +tools: + - homer: + description: | + HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. + documentation: http://homer.ucsd.edu/homer/ + doi: 10.1016/j.molcel.2010.05.004. +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - tagDir: + type: directory + description: "The 'Tag Directory'" + pattern: "tagDir" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - peaks: + type: file + description: The found peaks + pattern: "*peaks.txt" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@EMiller88" diff --git a/modules/homer/maketagdirectory/functions.nf b/modules/homer/maketagdirectory/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/homer/maketagdirectory/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf new file mode 100644 index 00000000..e0358bc1 --- /dev/null +++ b/modules/homer/maketagdirectory/main.nf @@ -0,0 +1,43 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +def options = initOptions(params.options) + +def VERSION = '4.11' + +process HOMER_MAKETAGDIRECTORY { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" + } else { + container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" + } + + input: + tuple val(meta), path(bed) + path fasta + + output: + tuple val(meta), path("tag_dir"), emit: tagdir + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + makeTagDirectory \\ + tag_dir \\ + $options.args \\ + $bed \\ + -genome $fasta + + echo $VERSION > ${software}.version.txt + """ +} diff --git a/modules/homer/maketagdirectory/meta.yml b/modules/homer/maketagdirectory/meta.yml new file mode 100644 index 00000000..78dee297 --- /dev/null +++ b/modules/homer/maketagdirectory/meta.yml @@ -0,0 +1,41 @@ +name: homer_maketagdirectory +description: Create a tag directory with the HOMER suite +keywords: + - peaks + - bed +tools: + - homer: + description: | + HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. + documentation: http://homer.ucsd.edu/homer/ + doi: 10.1016/j.molcel.2010.05.004. +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bed: + type: file + description: The peak files in bed format + pattern: "*.bed" + - fasta: + type: file + description: Fasta file of reference genome + pattern: "*.fasta" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - tag_dir: + type: directory + description: The "Tag Directory" + pattern: "tag_dir" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@EMiller88" diff --git a/modules/homer/makeucscfile/functions.nf b/modules/homer/makeucscfile/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/homer/makeucscfile/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf new file mode 100644 index 00000000..876d834f --- /dev/null +++ b/modules/homer/makeucscfile/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +def options = initOptions(params.options) + +def VERSION = '4.11' + +process HOMER_MAKEUCSCFILE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::homer=4.11=pl526hc9558a2_3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" + } else { + container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" + } + + input: + tuple val(meta), path(tagDir) + + output: + tuple val(meta), path("tag_dir/*ucsc.bedGraph.gz"), emit: bedGraph + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + makeUCSCfile \\ + $tagDir \\ + -o auto + $options.args + + echo $VERSION > ${software}.version.txt + """ +} diff --git a/modules/homer/makeucscfile/meta.yml b/modules/homer/makeucscfile/meta.yml new file mode 100644 index 00000000..891cb295 --- /dev/null +++ b/modules/homer/makeucscfile/meta.yml @@ -0,0 +1,38 @@ +name: homer_makeucscfile +description: Create a UCSC bed graph with the HOMER suite +keywords: + - peaks + - bed + - bedGraph +tools: + - homer: + description: | + HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. + documentation: http://homer.ucsd.edu/homer/ + doi: 10.1016/j.molcel.2010.05.004. +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - tagDir: + type: directory + description: "The 'Tag Directory'" + pattern: "tagDir" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bedGraph: + type: file + description: The UCSC bed graph + pattern: "tag_dir/*ucsc.bedGraph.gz" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@EMiller88" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 0c43bd88..fb033966 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -421,6 +421,18 @@ homer/annotatepeaks: - modules/homer/annotatepeaks/** - tests/modules/homer/annotatepeaks/** +homer/findpeaks: + - modules/homer/findpeaks/** + - tests/modules/homer/findpeaks/** + +homer/maketagdirectory: + - modules/homer/maketagdirectory/** + - tests/modules/homer/maketagdirectory/** + +homer/makeucscfile: + - modules/homer/makeucscfile/** + - tests/modules/homer/makeucscfile/** + iqtree: - modules/iqtree/** - tests/modules/iqtree/** diff --git a/tests/modules/homer/findpeaks/main.nf b/tests/modules/homer/findpeaks/main.nf new file mode 100644 index 00000000..06d44bdf --- /dev/null +++ b/tests/modules/homer/findpeaks/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) +include { HOMER_FINDPEAKS } from '../../../../modules/homer/findpeaks/main.nf' addParams( options: [args: '-style factor'] ) + +workflow test_homer_findpeaks { + input = [[id:'test'], + [file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true)]] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + HOMER_MAKETAGDIRECTORY (input, fasta) + HOMER_FINDPEAKS ( HOMER_MAKETAGDIRECTORY.out.tagdir ) +} + diff --git a/tests/modules/homer/findpeaks/test.yml 
b/tests/modules/homer/findpeaks/test.yml new file mode 100644 index 00000000..b0b1a0df --- /dev/null +++ b/tests/modules/homer/findpeaks/test.yml @@ -0,0 +1,8 @@ +- name: homer findpeaks + command: nextflow run ./tests/modules/homer/findpeaks -entry test_homer_findpeaks -c tests/config/nextflow.config + tags: + - homer + - homer/findpeaks + files: + - path: output/homer/test.peaks.txt + md5sum: f75ac1fea67f1e307a1ad4d059a9b6cc diff --git a/tests/modules/homer/maketagdirectory/main.nf b/tests/modules/homer/maketagdirectory/main.nf new file mode 100644 index 00000000..897aac1f --- /dev/null +++ b/tests/modules/homer/maketagdirectory/main.nf @@ -0,0 +1,32 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) + +workflow test_homer_maketagdirectory { + input = [[id:'test'], + [file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true)]] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + HOMER_MAKETAGDIRECTORY (input, fasta) +} + + +workflow test_homer_meta_maketagdirectory { + input = + [[[ id:'test1'], + [file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)]], + [[ id:'test2'], + [file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true)]]] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + meta_input = [[id: 'meta_test']] + [ input.collect{it[1]}.flatten() ] + + HOMER_MAKETAGDIRECTORY (meta_input, fasta) +} + +// TODO Make a failing bam test +// TODO Make a pass bam test that feeds the bam through samtools first diff --git a/tests/modules/homer/maketagdirectory/test.yml b/tests/modules/homer/maketagdirectory/test.yml new file mode 100644 index 00000000..80112c0b --- /dev/null +++ b/tests/modules/homer/maketagdirectory/test.yml @@ -0,0 +1,33 @@ +- name: homer maketagdirectory + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory -c tests/config/nextflow.config + tags: + - homer + - homer/maketagdirectory + files: + - path: output/homer/tag_dir/MT192765.1.tags.tsv + md5sum: e29522171ca2169b57396495f8b97485 + - path: output/homer/tag_dir/tagAutocorrelation.txt + md5sum: 62b107c4971b94126fb89a0bc2800455 + - path: output/homer/tag_dir/tagCountDistribution.txt + md5sum: fd4ee7ce7c5dfd7c9d739534b8180578 + - path: output/homer/tag_dir/tagInfo.txt + md5sum: 816baa642c946f8284eaa465638e9abb + - path: output/homer/tag_dir/tagLengthDistribution.txt + md5sum: e5aa2b9843ca9c04ace297280aed6af4 + +- name: homer meta maketagdirectory + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_meta_maketagdirectory -c tests/config/nextflow.config + tags: + - homer + - homer/maketagdirectory + files: + - path: output/homer/tag_dir/MT192765.1.tags.tsv + md5sum: e29522171ca2169b57396495f8b97485 + - path: output/homer/tag_dir/tagAutocorrelation.txt + md5sum: 62b107c4971b94126fb89a0bc2800455 + - path: output/homer/tag_dir/tagCountDistribution.txt + md5sum: fd4ee7ce7c5dfd7c9d739534b8180578 + - path: output/homer/tag_dir/tagInfo.txt + md5sum: 816baa642c946f8284eaa465638e9abb + - path: output/homer/tag_dir/tagLengthDistribution.txt + md5sum: e5aa2b9843ca9c04ace297280aed6af4 diff --git a/tests/modules/homer/makeucscfile/main.nf b/tests/modules/homer/makeucscfile/main.nf new file 
mode 100644 index 00000000..5ed75959 --- /dev/null +++ b/tests/modules/homer/makeucscfile/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) +include { HOMER_MAKEUCSCFILE } from '../../../../modules/homer/makeucscfile/main.nf' addParams( options: [:] ) + +workflow test_homer_makeucscfile { + input = [[id:'test'], + [file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true)]] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + HOMER_MAKETAGDIRECTORY (input, fasta) + HOMER_MAKEUCSCFILE ( HOMER_MAKETAGDIRECTORY.out.tagdir ) +} + diff --git a/tests/modules/homer/makeucscfile/test.yml b/tests/modules/homer/makeucscfile/test.yml new file mode 100644 index 00000000..4d337f41 --- /dev/null +++ b/tests/modules/homer/makeucscfile/test.yml @@ -0,0 +1,7 @@ +- name: homer makeucscfile + command: nextflow run ./tests/modules/homer/makeucscfile -entry test_homer_makeucscfile -c tests/config/nextflow.config + tags: + - homer + - homer/makeucscfile + files: + - path: output/homer/tag_dir/tag_dir.ucsc.bedGraph.gz From 1c14be835bb88a4652cd00ade22a89ef97da0980 Mon Sep 17 00:00:00 2001 From: kojix2 <2xijok@gmail.com> Date: Thu, 9 Sep 2021 16:32:33 +0900 Subject: [PATCH 056/314] Update the commands in the Using existing modules section (#698) --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index f82e45f8..e75a039b 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi 2. List the available modules: ```console - $ nf-core modules list + $ nf-core modules list remote ,--./,-. ___ __ __ __ ___ /,-._.--~\ @@ -65,7 +65,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi 3. Install the module in your pipeline directory: ```console - $ nf-core modules install . --tool fastqc + $ nf-core modules install fastqc ,--./,-. ___ __ __ __ ___ /,-._.--~\ @@ -92,7 +92,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi 5. Remove the module from the pipeline repository if required: ```console - $ nf-core modules remove . --tool fastqc + $ nf-core modules remove fastqc ,--./,-. ___ __ __ __ ___ /,-._.--~\ @@ -109,7 +109,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi 6. Check that a locally installed nf-core module is up-to-date compared to the one hosted in this repo: ```console - $ nf-core modules lint . --tool fastqc + $ nf-core modules lint fastqc ,--./,-. 
___ __ __ __ ___ /,-._.--~\ From c5634ba99b1e49369d29974c78e4b091a91faaa3 Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Sun, 12 Sep 2021 19:51:40 +0200 Subject: [PATCH 057/314] Add bcftools/view module (#669) * initial commit [ci skip] * add stubs [ci skip] * Finalize the bcftools/view module * accomodate optional files * add optional region file * refer the targets file in the tests * all tests passing * documen the optional parameters --- modules/bcftools/view/functions.nf | 68 ++++++++++++++++++++++++++++ modules/bcftools/view/main.nf | 51 +++++++++++++++++++++ modules/bcftools/view/meta.yml | 62 +++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 1 + tests/modules/bcftools/view/main.nf | 31 +++++++++++++ tests/modules/bcftools/view/test.yml | 17 +++++++ 7 files changed, 234 insertions(+) create mode 100644 modules/bcftools/view/functions.nf create mode 100644 modules/bcftools/view/main.nf create mode 100644 modules/bcftools/view/meta.yml create mode 100644 tests/modules/bcftools/view/main.nf create mode 100644 tests/modules/bcftools/view/test.yml diff --git a/modules/bcftools/view/functions.nf b/modules/bcftools/view/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bcftools/view/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf new file mode 100644 index 00000000..92f7036b --- /dev/null +++ b/modules/bcftools/view/main.nf @@ -0,0 +1,51 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BCFTOOLS_VIEW { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" + } else { + container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" + } + + input: + tuple val(meta), path(vcf), path(index) + path(regions) + path(targets) + path(samples) + + output: + tuple val(meta), path("*.gz") , emit: vcf + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def regions_file = regions ? "--regions-file ${regions}" : "" + def targets_file = targets ? "--targets-file ${targets}" : "" + def samples_file = samples ? "--samples-file ${samples}" : "" + + + """ + bcftools view \\ + --output ${prefix}.vcf.gz \\ + ${regions_file} \\ + ${targets_file} \\ + ${samples_file} \\ + $options.args \\ + --threads $task.cpus \\ + ${vcf} + + echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + """ +} diff --git a/modules/bcftools/view/meta.yml b/modules/bcftools/view/meta.yml new file mode 100644 index 00000000..947e2562 --- /dev/null +++ b/modules/bcftools/view/meta.yml @@ -0,0 +1,62 @@ +name: bcftools_view +description: View, subset and filter VCF or BCF files by position and filtering expression. Convert between VCF and BCF +keywords: + - variant calling + - view + - bcftools + - VCF + +tools: + - view: + description: | + View, subset and filter VCF or BCF files by position and filtering expression. Convert between VCF and BCF + homepage: http://samtools.github.io/bcftools/bcftools.html + documentation: http://www.htslib.org/doc/bcftools.html + doi: 10.1093/bioinformatics/btp352 +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: | + The vcf file to be inspected. + e.g. 'file.vcf' + - index: + type: file + description: | + The tab index for the VCF file to be inspected. + e.g. 'file.tbi' + - regions: + type: file + description: | + Optionally, restrict the operation to regions listed in this file. + e.g. 'file.vcf' + - targets: + type: file + description: | + Optionally, restrict the operation to regions listed in this file (doesn't rely upon index files) + e.g. 
'file.vcf' + - samples: + type: file + description: | + Optional, file of sample names to be included or excluded. + e.g. 'file.tsv' +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: VCF normalized output file + pattern: "*.{vcf.gz}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index fb033966..042040f7 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -70,6 +70,10 @@ bcftools/stats: - modules/bcftools/stats/** - tests/modules/bcftools/stats/** +bcftools/view: + - modules/bcftools/view/** + - tests/modules/bcftools/view/** + bedtools/bamtobed: - modules/bedtools/bamtobed/** - tests/modules/bedtools/bamtobed/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 73232088..e03bb2a8 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -67,6 +67,7 @@ params { test2_vcf = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test2.vcf" test2_vcf_gz = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test2.vcf.gz" test2_vcf_gz_tbi = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test2.vcf.gz.tbi" + test2_vcf_targets_tsv_gz = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test2.targets.tsv.gz" test3_vcf = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test3.vcf" test3_vcf_gz = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test3.vcf.gz" test3_vcf_gz_tbi = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test3.vcf.gz.tbi" diff --git a/tests/modules/bcftools/view/main.nf b/tests/modules/bcftools/view/main.nf new file mode 100644 index 00000000..a8ac3b31 --- /dev/null +++ b/tests/modules/bcftools/view/main.nf @@ -0,0 +1,31 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BCFTOOLS_VIEW } from '../../../../modules/bcftools/view/main.nf' addParams( options: ['args': '--no-version'] ) + +workflow test_bcftools_view { + + regions = [] + targets = [] + samples = [] + + input = [[ id:'out', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true)] + + BCFTOOLS_VIEW ( input, regions, targets, samples ) +} + +workflow test_bcftools_view_with_optional_files { + + regions = file(params.test_data['sarscov2']['illumina']['test3_vcf_gz'], checkIfExists: true) + targets = file(params.test_data['sarscov2']['illumina']['test2_vcf_targets_tsv_gz'], checkIfExists: true) + samples = [] + + input = [[ id:'out', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test2_vcf_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_vcf_gz_tbi'], checkIfExists: true)] + + BCFTOOLS_VIEW ( input, regions, targets, samples ) +} diff --git a/tests/modules/bcftools/view/test.yml b/tests/modules/bcftools/view/test.yml new file mode 100644 index 00000000..179e9a1c --- /dev/null +++ b/tests/modules/bcftools/view/test.yml @@ -0,0 +1,17 @@ +- name: bcftools view + command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view -c tests/config/nextflow.config + tags: + - bcftools + - bcftools/view + files: + - path: output/bcftools/out.vcf.gz + md5sum: fc178eb342a91dc0d1d568601ad8f8e2 + +- name: bcftools view with optional files + 
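+  # This entry covers the optional regions/targets inputs wired up in
+  # test_bcftools_view_with_optional_files (tests/modules/bcftools/view/main.nf above).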
command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view_with_optional_files -c tests/config/nextflow.config + tags: + - bcftools + - bcftools/view + files: + - path: output/bcftools/out.vcf.gz + md5sum: 1d450e1c65b081ead0edbf5e4fa539ee From 1023a98b51f8cb2692907429893f5b26628aa20a Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Mon, 13 Sep 2021 16:04:14 +0100 Subject: [PATCH 058/314] Getpileupsummaries (#689) * first commit, added template files for new module * created getpileupsummaries script, tests and both yml files * fixed typo in meta.yml * Update modules/gatk4/getpileupsummaries/meta.yml changed gz_tbi to gz.tbi as suggested Co-authored-by: Maxime U. Garcia Co-authored-by: GCJMackenzie Co-authored-by: Maxime U. Garcia Co-authored-by: Maxime U. Garcia --- modules/gatk4/getpileupsummaries/functions.nf | 68 +++++++++++++++++++ modules/gatk4/getpileupsummaries/main.nf | 48 +++++++++++++ modules/gatk4/getpileupsummaries/meta.yml | 57 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ .../modules/gatk4/getpileupsummaries/main.nf | 31 +++++++++ .../modules/gatk4/getpileupsummaries/test.yml | 17 +++++ 6 files changed, 225 insertions(+) create mode 100644 modules/gatk4/getpileupsummaries/functions.nf create mode 100644 modules/gatk4/getpileupsummaries/main.nf create mode 100644 modules/gatk4/getpileupsummaries/meta.yml create mode 100644 tests/modules/gatk4/getpileupsummaries/main.nf create mode 100644 tests/modules/gatk4/getpileupsummaries/test.yml diff --git a/modules/gatk4/getpileupsummaries/functions.nf b/modules/gatk4/getpileupsummaries/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/gatk4/getpileupsummaries/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf new file mode 100644 index 00000000..9ae95d9c --- /dev/null +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -0,0 +1,48 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_GETPILEUPSUMMARIES { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(bam), path(bai) + path variants + path variants_idx + path sites + + output: + tuple val(meta), path('*.pileups.table'), emit: table + path '*.version.txt' , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def sitesCommand = '' + + sitesCommand = sites ? " -L ${sites} " : " -L ${variants} " + + """ + gatk GetPileupSummaries \\ + -I $bam \\ + -V $variants \\ + $sitesCommand \\ + -O ${prefix}.pileups.table \\ + $options.args + + echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + """ +} diff --git a/modules/gatk4/getpileupsummaries/meta.yml b/modules/gatk4/getpileupsummaries/meta.yml new file mode 100644 index 00000000..e784595a --- /dev/null +++ b/modules/gatk4/getpileupsummaries/meta.yml @@ -0,0 +1,57 @@ +name: gatk4_getpileupsummaries +description: | + Summarizes counts of reads that support reference, alternate and other alleles for given sites. Results can be used with CalculateContamination. Requires a common germline variant sites file, such as from gnomAD. +keywords: + - gatk4 + - getpileupsumaries + - readcountssummary + - germlinevariantsites +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - bam: + type: file + description: BAM file to be summarised. + pattern: "*.bam" + - bai: + type: file + description: BAM file index. + pattern: "*.bam.bai" + - variants: + type: file + description: Population vcf of germline sequencing, containing allele fractions. 
Is also used as sites file if no separate sites file is specified. + pattern: "*.vcf.gz" + - variants_idx: + type: file + description: Index file for the germline resource. + pattern: "*.vcf.gz.tbi" + - sites: + type: file + description: File containing specified sites to be used for the summary. If this option is not specified, variants file is used instead automatically. + pattern: "*.interval_list" + +output: + - pileup: + type: file + description: File containing the pileup summary table. + pattern: "*.pileups.table" + - version: + type: file + description: File containing software version + pattern: "*.version.txt" + +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 042040f7..6c7993a3 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -334,6 +334,10 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** +gatk4/getpileupsummaries: + - modules/gatk4/getpileupsummaries/** + - tests/modules/gatk4/getpileupsummaries/** + gatk4/haplotypecaller: - modules/gatk4/haplotypecaller/** - tests/modules/gatk4/haplotypecaller/** diff --git a/tests/modules/gatk4/getpileupsummaries/main.nf b/tests/modules/gatk4/getpileupsummaries/main.nf new file mode 100644 index 00000000..0c7d3fb6 --- /dev/null +++ b/tests/modules/gatk4/getpileupsummaries/main.nf @@ -0,0 +1,31 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_GETPILEUPSUMMARIES } from '../../../../modules/gatk4/getpileupsummaries/main.nf' addParams( options: [:] ) + +workflow test_gatk4_getpileupsummaries_just_variants { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true) , + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) ] + + variants = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + variants_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + sites = [] + + GATK4_GETPILEUPSUMMARIES ( input , variants , variants_idx , sites ) +} + +workflow test_gatk4_getpileupsummaries_separate_sites { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true) , + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) ] + + variants = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + variants_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + sites = file( "https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/genome.interval_list" , checkIfExists: true) + + GATK4_GETPILEUPSUMMARIES ( input , variants , variants_idx , sites ) +} diff --git a/tests/modules/gatk4/getpileupsummaries/test.yml b/tests/modules/gatk4/getpileupsummaries/test.yml new file mode 100644 index 00000000..88cca794 --- /dev/null +++ b/tests/modules/gatk4/getpileupsummaries/test.yml @@ -0,0 +1,17 @@ +- name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_just_variants + command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_just_variants -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/getpileupsummaries + 
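+  # Note: the separate-sites test below yields an identical pileup table (same md5sum),
+  # presumably because the supplied interval list spans the same region as the variants file.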
files: + - path: output/gatk4/test.pileups.table + md5sum: 00f92a8f7282d6129f1aca04e2c7d968 + +- name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_separate_sites + command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/getpileupsummaries + files: + - path: output/gatk4/test.pileups.table + md5sum: 00f92a8f7282d6129f1aca04e2c7d968 From bd68797ffb0997f3c62d5964d78c3bd5cd7e4736 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Mon, 13 Sep 2021 16:16:23 +0100 Subject: [PATCH 059/314] Mutect2 (#682) * mutect2 files added, first draft of module code entered * removing comment from main.nf * removing comment from main.nf * test added, commit made before editing yaml * tests added, still needs final check and info/comments added * gatk4 version changed to gatk4=4.2.0.0 * multiple sample support added, information added to module yaml file * Update meta.yml fixed linting error * add keywords to meta.yml * Corrections made to meta.yml * removed whitespace from meta.yml Co-authored-by: Gavin.Mackenzie Co-authored-by: Maxime U. Garcia --- modules/gatk4/mutect2/functions.nf | 68 +++++++++++++++++++++ modules/gatk4/mutect2/main.nf | 76 ++++++++++++++++++++++++ modules/gatk4/mutect2/meta.yml | 89 ++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/gatk4/mutect2/main.nf | 62 +++++++++++++++++++ tests/modules/gatk4/mutect2/test.yml | 27 +++++++++ 6 files changed, 326 insertions(+) create mode 100644 modules/gatk4/mutect2/functions.nf create mode 100644 modules/gatk4/mutect2/main.nf create mode 100644 modules/gatk4/mutect2/meta.yml create mode 100644 tests/modules/gatk4/mutect2/main.nf create mode 100644 tests/modules/gatk4/mutect2/test.yml diff --git a/modules/gatk4/mutect2/functions.nf b/modules/gatk4/mutect2/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/gatk4/mutect2/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf new file mode 100644 index 00000000..2fcdbbd7 --- /dev/null +++ b/modules/gatk4/mutect2/main.nf @@ -0,0 +1,76 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_MUTECT2 { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta) , path(bam) , path(bai) , val(which_norm) + val run_single + val run_pon + path fasta + path fastaidx + path dict + path germline_resource + path germline_resource_idx + path panel_of_normals + path panel_of_normals_idx + + output: + tuple val(meta), path("*.vcf.gz"), emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi + tuple val(meta), path("*.f1r2.tar.gz"), optional:true, emit: f1r2 + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def inputsList = [] + def normalsList = [] + def inputsCommand = '' + def panelsCommand = '' + def normalsCommand = '' + + bam.each() {a -> inputsList.add(" -I " + a ) } + inputsCommand = inputsList.join( ' ') + + if(run_pon) { + panelsCommand = '' + normalsCommand = '' + + } else if(run_single) { + panelsCommand = " --germline-resource $germline_resource --panel-of-normals $panel_of_normals" + normalsCommand = '' + + } else { + panelsCommand = " --germline-resource $germline_resource --panel-of-normals $panel_of_normals --f1r2-tar-gz ${prefix}.f1r2.tar.gz" + which_norm.each() {a -> normalsList.add(" -normal " + a ) } + normalsCommand = normalsList.join( ' ') + } + + """ + gatk Mutect2 \\ + -R ${fasta} \\ + ${inputsCommand} \\ + ${normalsCommand} \\ + ${panelsCommand} \\ + -O ${prefix}.vcf.gz \\ + $options.args + + echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + """ +} diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml new file mode 100644 index 00000000..8ba803df --- /dev/null +++ b/modules/gatk4/mutect2/meta.yml @@ -0,0 +1,89 @@ +name: gatk4_mutect2 +description: Call somatic SNVs and indels via local assembly of haplotypes. 
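+# The module runs in one of three modes selected by the run_single/run_pon inputs below:
+# tumour/normal pair (default), tumour-only, or panel-of-normals generation.
+# tests/modules/gatk4/mutect2/main.nf further down in this patch shows an example invocation of each mode.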
+keywords: + - gatk4 + - mutect2 + - haplotype + - somatic +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test'] + - bam: + type: list + description: list of BAM files + pattern: "*.bam" + - bai: + type: list + description: list of BAM file indexes + pattern: "*.bam.bai" + - which_norm: + type: list + description: optional list of sample headers contained in the normal sample bam files (these are required for tumor_normal_pair mode) + - run_single: + type: boolean + description: Specify whether or not to run in tumor_single mode instead of tumor_normal_pair mode (will be ignored if run_pon is also true) + - run_pon: + type: boolean + description: Specify whether or not to run in panel_of_normal mode instead of tumor_normal_pair mode + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fastaidx: + type: file + description: Index of reference fasta file + pattern: "fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - germline_resource: + type: file + description: Population vcf of germline sequencing, containing allele fractions. + pattern: "*.vcf.gz" + - germline_resource_idx: + type: file + description: Index file for the germline resource. + pattern: "*.vcf.gz_tbi" + - panel_of_normals: + type: file + description: vcf file to be used as a panel of normals. + pattern: "*.vcf.gz" + - panel_of_normals_idx: + type: file + description: Index for the panel of normals. 
+ pattern: "*.vcf.gz_tbi" + +output: + - vcf: + type: file + description: compressed vcf file + pattern: "*.vcf.gz" + - tbi: + type: file + description: Index of vcf file + pattern: "*vcf.gz.tbi" + - f1r2: + type: file + description: file containing information to be passed to LearnReadOrientationModel (only outputted when tumor_normal_pair mode is run) + pattern: "*.f1r2.tar.gz" + - version: + type: file + description: File containing software version + pattern: "*.version.txt" + +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6c7993a3..a10bccf1 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -358,6 +358,10 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** +gatk4/mutect2: + - modules/gatk4/mutect2/** + - tests/modules/gatk4/mutect2/** + gatk4/revertsam: - modules/gatk4/revertsam/** - tests/modules/gatk4/revertsam/** diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf new file mode 100644 index 00000000..072b3125 --- /dev/null +++ b/tests/modules/gatk4/mutect2/main.nf @@ -0,0 +1,62 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' addParams( options: [:] ) + +workflow test_gatk4_mutect2_tumor_normal_pair { + input = [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + ["testN"] + ] + run_single = false + run_pon = false + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + germline_resource_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) + panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + + GATK4_MUTECT2 ( input , run_single , run_pon , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) +} + +workflow test_gatk4_mutect2_tumor_single { + input = [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] + ] + run_single = true + run_pon = false + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = 
file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + germline_resource_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) + panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + + GATK4_MUTECT2 ( input , run_single , run_pon , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) +} + +workflow test_gatk4_mutect2_generate_pon { + input = [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] + ] + run_single = false + run_pon = true + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = [] + germline_resource_idx = [] + panel_of_normals = [] + panel_of_normals_idx = [] + + GATK4_MUTECT2 ( input , run_single , run_pon , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) +} diff --git a/tests/modules/gatk4/mutect2/test.yml b/tests/modules/gatk4/mutect2/test.yml new file mode 100644 index 00000000..03205266 --- /dev/null +++ b/tests/modules/gatk4/mutect2/test.yml @@ -0,0 +1,27 @@ +- name: gatk4 mutect2 test_gatk4_mutect2_tumor_normal_pair + command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_normal_pair -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/mutect2 + files: + - path: output/gatk4/test.f1r2.tar.gz + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.tbi + +- name: gatk4 mutect2 test_gatk4_mutect2_tumor_single + command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_single -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/mutect2 + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.tbi + +- name: gatk4 mutect2 test_gatk4_mutect2_generate_pon + command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_generate_pon -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/mutect2 + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.tbi From 59ca7444cf27ac643d136941596ff5ab3136b832 Mon Sep 17 00:00:00 2001 From: Davi Marcon <48180517+Mxrcon@users.noreply.github.com> Date: Tue, 14 Sep 2021 03:51:40 -0300 Subject: [PATCH 060/314] Adding plink/vcf module (#656) * adding plink module using nf-core tool [ci skip] * Restructures the project for plink/vcf (#1) * Add version string for plink * Create a plink/vcf module * small tweaks on main.nf and started to test [ci skip] * small changes on test args, local test with docker passed! 
* Update plink/vcf module listing * Update tag * fix tags as per linting guidelines * revert to the original state of tags * adding --threads to `main.nf` and `meta.yml` information Co-authored-by: Abhinav Sharma --- modules/plink/vcf/functions.nf | 68 ++++++++++++++++++++++++++++++++ modules/plink/vcf/main.nf | 44 +++++++++++++++++++++ modules/plink/vcf/meta.yml | 53 +++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/plink/vcf/main.nf | 13 ++++++ tests/modules/plink/vcf/test.yml | 12 ++++++ 6 files changed, 194 insertions(+) create mode 100644 modules/plink/vcf/functions.nf create mode 100644 modules/plink/vcf/main.nf create mode 100644 modules/plink/vcf/meta.yml create mode 100644 tests/modules/plink/vcf/main.nf create mode 100644 tests/modules/plink/vcf/test.yml diff --git a/modules/plink/vcf/functions.nf b/modules/plink/vcf/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/plink/vcf/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf new file mode 100644 index 00000000..39cc3825 --- /dev/null +++ b/modules/plink/vcf/main.nf @@ -0,0 +1,44 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PLINK_VCF { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::plink=1.90b6.21" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1" + } else { + container "quay.io/biocontainers/plink:1.90b6.21--h779adbc_1" + } + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.bed"), emit: bed, optional: true + tuple val(meta), path("*.bim"), emit: bim, optional: true + tuple val(meta), path("*.fam"), emit: fam, optional: true + + path "*.version.txt", emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + plink \\ + --vcf ${vcf} \\ + $options.args \\ + --threads $task.cpus \\ + --out ${prefix} + + echo \$(plink --version 2>&1) | sed 's/^PLINK //' | sed 's/..-bit.*//'> ${software}.version.txt + """ +} diff --git a/modules/plink/vcf/meta.yml b/modules/plink/vcf/meta.yml new file mode 100644 index 00000000..8673158b --- /dev/null +++ b/modules/plink/vcf/meta.yml @@ -0,0 +1,53 @@ +name: plink_vcf +description: Analyses variant calling files using plink +keywords: + - plink + - vcf +tools: + - plink: + description: | + Whole genome association analysis toolset, designed to perform a range + of basic, large-scale analyses in a computationally efficient manner + homepage: "https://www.cog-genomics.org/plink" + documentation: None + tool_dev_url: "https://www.cog-genomics.org/plink/1.9/dev" + doi: "" + licence: ['GPL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: Variant calling file (vcf) + pattern: "*.{vcf}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bed: + type: file + description: PLINK binary biallelic genotype table + pattern: "*.{bed}" + - bim: + type: file + description: PLINK extended MAP file + pattern: "*.{bim}" + - fam: + type: file + description: PLINK sample information file + pattern: "*.{fam}" + +authors: + - "@Mxrcon" + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a10bccf1..0e8c6153 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -659,6 +659,10 @@ plasmidid: - modules/plasmidid/** - tests/modules/plasmidid/** +plink/vcf: + - modules/plink/vcf/** + - tests/modules/plink/vcf/** + preseq/lcextrap: - modules/preseq/lcextrap/** - tests/modules/preseq/lcextrap/** diff --git a/tests/modules/plink/vcf/main.nf b/tests/modules/plink/vcf/main.nf new file mode 100644 index 00000000..096bacdd --- /dev/null +++ b/tests/modules/plink/vcf/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' addParams( options: ['args':" --make-bed --biallelic-only strict --vcf-half-call missing --double-id --recode ped --id-delim \'=\' --allow-extra-chr"]) + +workflow test_plink_vcf { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] + + PLINK_VCF ( input ) +} diff --git a/tests/modules/plink/vcf/test.yml b/tests/modules/plink/vcf/test.yml new file mode 100644 index 00000000..bfd54386 --- /dev/null +++ b/tests/modules/plink/vcf/test.yml @@ -0,0 +1,12 @@ +- name: plink vcf test_plink_vcf + command: nextflow run tests/modules/plink/vcf -entry test_plink_vcf -c tests/config/nextflow.config + tags: + - plink + - plink/vcf + files: + - path: output/plink/test.bed + md5sum: 55c3ab2636212911b5f952ef6f5d855c + - path: output/plink/test.bim + md5sum: 54164b6f103e152de05712c6bb317db8 + - path: output/plink/test.fam + md5sum: 22d32d7daa3ae6b819a24895e82b2a70 From 6ff995e93ddec11d8c3112edcee231ce6777a15d Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Tue, 14 Sep 2021 09:19:01 +0200 Subject: [PATCH 061/314] Add bcftools/query module (#670) * initial commit [ci skip] * add basic structure [ci skip] * finalized the bcftools/query module * add optional files [ci skip] * Add the vcf index file as param [ci skip] * update the md5sum for output file [ci skip] * all tests passing --- modules/bcftools/query/functions.nf | 68 +++++++++++++++++++++++++++ modules/bcftools/query/main.nf | 49 +++++++++++++++++++ modules/bcftools/query/meta.yml | 61 ++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bcftools/query/main.nf | 31 ++++++++++++ tests/modules/bcftools/query/test.yml | 17 +++++++ 6 files changed, 230 insertions(+) create mode 100644 modules/bcftools/query/functions.nf create mode 100644 modules/bcftools/query/main.nf create mode 100644 modules/bcftools/query/meta.yml create mode 100644 tests/modules/bcftools/query/main.nf create mode 100644 tests/modules/bcftools/query/test.yml diff --git a/modules/bcftools/query/functions.nf b/modules/bcftools/query/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bcftools/query/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def 
getSoftwareName(task_process) {
+    return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
+}
+
+//
+// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
+//
+def initOptions(Map args) {
+    def Map options = [:]
+    options.args = args.args ?: ''
+    options.args2 = args.args2 ?: ''
+    options.args3 = args.args3 ?: ''
+    options.publish_by_meta = args.publish_by_meta ?: []
+    options.publish_dir = args.publish_dir ?: ''
+    options.publish_files = args.publish_files
+    options.suffix = args.suffix ?: ''
+    return options
+}
+
+//
+// Tidy up and join elements of a list to return a path string
+//
+def getPathFromList(path_list) {
+    def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
+    paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
+    return paths.join('/')
+}
+
+//
+// Function to save/publish module results
+//
+def saveFiles(Map args) {
+    if (!args.filename.endsWith('.version.txt')) {
+        def ioptions = initOptions(args.options)
+        def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
+        if (ioptions.publish_by_meta) {
+            def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
+            for (key in key_list) {
+                if (args.meta && key instanceof String) {
+                    def path = key
+                    if (args.meta.containsKey(key)) {
+                        path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
+                    }
+                    path = path instanceof String ? path : ''
+                    path_list.add(path)
+                }
+            }
+        }
+        if (ioptions.publish_files instanceof Map) {
+            for (ext in ioptions.publish_files) {
+                if (args.filename.endsWith(ext.key)) {
+                    def ext_list = path_list.collect()
+                    ext_list.add(ext.value)
+                    return "${getPathFromList(ext_list)}/$args.filename"
+                }
+            }
+        } else if (ioptions.publish_files == null) {
+            return "${getPathFromList(path_list)}/$args.filename"
+        }
+    }
+}
diff --git a/modules/bcftools/query/main.nf b/modules/bcftools/query/main.nf
new file mode 100644
index 00000000..26eeca63
--- /dev/null
+++ b/modules/bcftools/query/main.nf
@@ -0,0 +1,49 @@
+// Import generic module functions
+include { initOptions; saveFiles; getSoftwareName } from './functions'
+
+params.options = [:]
+options = initOptions(params.options)
+
+process BCFTOOLS_QUERY {
+    tag "$meta.id"
+    label 'process_medium'
+    publishDir "${params.outdir}",
+        mode: params.publish_dir_mode,
+        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }
+
+    conda (params.enable_conda ? "bioconda::bcftools=1.13" : null)
+    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
+        container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0"
+    } else {
+        container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0"
+    }
+
+    input:
+    tuple val(meta), path(vcf), path(index)
+    path(regions)
+    path(targets)
+    path(samples)
+
+    output:
+    tuple val(meta), path("*.gz") , emit: vcf
+    path "*.version.txt" , emit: version
+
+    script:
+    def software = getSoftwareName(task.process)
+    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
+    def regions_file = regions ? "--regions-file ${regions}" : ""
+    def targets_file = targets ? "--targets-file ${targets}" : ""
+    def samples_file = samples ? "--samples-file ${samples}" : ""
+
+    """
+    bcftools query \\
+        --output ${prefix}.vcf.gz \\
+        ${regions_file} \\
+        ${targets_file} \\
+        ${samples_file} \\
+        $options.args \\
+        ${vcf}
+
+    echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt
+    """
+}
diff --git a/modules/bcftools/query/meta.yml b/modules/bcftools/query/meta.yml
new file mode 100644
index 00000000..7806c7db
--- /dev/null
+++ b/modules/bcftools/query/meta.yml
@@ -0,0 +1,61 @@
+name: bcftools_query
+description: Extracts fields from VCF or BCF files and outputs them in user-defined format.
+keywords:
+  - query
+  - variant calling
+  - bcftools
+  - VCF
+tools:
+  - query:
+      description: |
+        Extracts fields from VCF or BCF files and outputs them in user-defined format.
+      homepage: http://samtools.github.io/bcftools/bcftools.html
+      documentation: http://www.htslib.org/doc/bcftools.html
+      doi: 10.1093/bioinformatics/btp352
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - vcf:
+      type: file
+      description: |
+        The vcf file to be queried.
+        e.g. 'file.vcf'
+  - index:
+      type: file
+      description: |
+        The tabix index for the VCF file to be inspected.
+        e.g. 'file.tbi'
+  - regions:
+      type: file
+      description: |
+        Optionally, restrict the operation to regions listed in this file.
+        e.g. 'file.vcf'
+  - targets:
+      type: file
+      description: |
+        Optionally, restrict the operation to regions listed in this file (doesn't rely upon index files)
+        e.g. 'file.vcf'
+  - samples:
+      type: file
+      description: |
+        Optional, file of sample names to be included or excluded.
+        e.g. 'file.tsv'
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - vcf:
+      type: file
+      description: VCF query output file
+      pattern: "*.{vcf.gz}"
+  - version:
+      type: file
+      description: File containing software version
+      pattern: "*.{version.txt}"
+authors:
+  - "@abhi18av"
diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml
index 0e8c6153..23417eef 100644
--- a/tests/config/pytest_modules.yml
+++ b/tests/config/pytest_modules.yml
@@ -62,6 +62,10 @@ bcftools/norm:
   - modules/bcftools/norm/**
   - tests/modules/bcftools/norm/**
 
+bcftools/query:
+  - modules/bcftools/query/**
+  - tests/modules/bcftools/query/**
+
 bcftools/reheader:
   - modules/bcftools/reheader/**
   - tests/modules/bcftools/reheader/**
diff --git a/tests/modules/bcftools/query/main.nf b/tests/modules/bcftools/query/main.nf
new file mode 100644
index 00000000..a16ceddf
--- /dev/null
+++ b/tests/modules/bcftools/query/main.nf
@@ -0,0 +1,31 @@
+#!/usr/bin/env nextflow
+
+nextflow.enable.dsl = 2
+
+include { BCFTOOLS_QUERY } from '../../../../modules/bcftools/query/main.nf' addParams( options: ['args': "-f '%CHROM %POS %REF %ALT[%SAMPLE=%GT]'" ] )
+
+workflow test_bcftools_query {
+
+    regions = []
+    targets = []
+    samples = []
+
+    input = [ [ id:'out' ], // meta map
+              file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true),
+              file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true)]
+
+    BCFTOOLS_QUERY ( input, regions, targets, samples )
+}
+
+workflow test_bcftools_query_with_optional_files {
+
+    regions = file(params.test_data['sarscov2']['illumina']['test3_vcf_gz'], checkIfExists: true)
+    targets = file(params.test_data['sarscov2']['illumina']['test2_vcf_targets_tsv_gz'], checkIfExists: true)
+    samples = []
+
+    input = [ [ id:'out' ], // meta map
+ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true)] + + BCFTOOLS_QUERY ( input, regions, targets, samples ) +} diff --git a/tests/modules/bcftools/query/test.yml b/tests/modules/bcftools/query/test.yml new file mode 100644 index 00000000..fbfda92b --- /dev/null +++ b/tests/modules/bcftools/query/test.yml @@ -0,0 +1,17 @@ +- name: bcftools query + command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query -c tests/config/nextflow.config + tags: + - bcftools + - bcftools/query + files: + - path: output/bcftools/out.vcf.gz + md5sum: c32a6d28f185822d8fe1eeb7e42ec155 + +- name: bcftools query with optional files + command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query_with_optional_files -c tests/config/nextflow.config + tags: + - bcftools + - bcftools/query + files: + - path: output/bcftools/out.vcf.gz + md5sum: 6bb5df49bfb5af39f7037cdf95032aac From c485109d9b79228e926bed1a860c457bcf269c65 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 15 Sep 2021 10:31:49 +0200 Subject: [PATCH 062/314] Add module: bamaligncleaner (#676) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Add bamAlignCleaner module * Add container tags * Update modules/bamaligncleaner/main.nf --- .nf-core.yml | 1 - modules/bamaligncleaner/functions.nf | 68 ++++++++++++++++++++++++++ modules/bamaligncleaner/main.nf | 40 +++++++++++++++ modules/bamaligncleaner/meta.yml | 40 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bamaligncleaner/main.nf | 13 +++++ tests/modules/bamaligncleaner/test.yml | 7 +++ 7 files changed, 172 insertions(+), 1 deletion(-) create mode 100644 modules/bamaligncleaner/functions.nf create mode 100644 modules/bamaligncleaner/main.nf create mode 100644 modules/bamaligncleaner/meta.yml create mode 100644 tests/modules/bamaligncleaner/main.nf create mode 100644 tests/modules/bamaligncleaner/test.yml diff --git a/.nf-core.yml b/.nf-core.yml index 4f3bae33..72971af8 100644 --- a/.nf-core.yml +++ b/.nf-core.yml @@ -7,4 +7,3 @@ bump-versions: rseqc/inferexperiment: False rseqc/innerdistance: False sortmerna: False - malt/build: False diff --git a/modules/bamaligncleaner/functions.nf b/modules/bamaligncleaner/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bamaligncleaner/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // 
Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bamaligncleaner/main.nf b/modules/bamaligncleaner/main.nf new file mode 100644 index 00000000..8ce73ee4 --- /dev/null +++ b/modules/bamaligncleaner/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BAMALIGNCLEANER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bamaligncleaner=0.2.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bamaligncleaner:0.2.1--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/bamaligncleaner:0.2.1--pyhdfd78af_0" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + bamAlignCleaner \\ + $options.args \\ + -o ${prefix}.bam \\ + ${bam} + + echo \$(bamAlignCleaner --version) | sed 's/.*version //' > ${software}.version.txt + """ +} diff --git a/modules/bamaligncleaner/meta.yml b/modules/bamaligncleaner/meta.yml new file mode 100644 index 00000000..8afdd44b --- /dev/null +++ b/modules/bamaligncleaner/meta.yml @@ -0,0 +1,40 @@ +name: bamaligncleaner +description: removes unused references from header of sorted BAM/CRAM files. +keywords: + - bam +tools: + - bamaligncleaner: + description: Removes unaligned references in aligned BAM alignment file + homepage: https://github.com/maxibor/bamAlignCleaner + documentation: https://github.com/maxibor/bamAlignCleaner + tool_dev_url: https://github.com/maxibor/bamAlignCleaner + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM file + pattern: "*.{bam,cram}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bam: + type: file + description: Sorted BAM/CRAM file + pattern: "*.{bam,cram}" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 23417eef..6d1f5dac 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -22,6 +22,10 @@ artic/minion: - modules/artic/minion/** - tests/modules/artic/minion/** +bamaligncleaner: + - modules/bamaligncleaner/** + - tests/modules/bamaligncleaner/** + bandage/image: - modules/bandage/image/** - tests/modules/bandage/image/** diff --git a/tests/modules/bamaligncleaner/main.nf b/tests/modules/bamaligncleaner/main.nf new file mode 100644 index 00000000..94ee005f --- /dev/null +++ b/tests/modules/bamaligncleaner/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAMALIGNCLEANER } from '../../../modules/bamaligncleaner/main.nf' addParams( options: [:] ) + +workflow test_bamaligncleaner { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) ] + + BAMALIGNCLEANER ( input ) +} diff --git a/tests/modules/bamaligncleaner/test.yml b/tests/modules/bamaligncleaner/test.yml new file mode 100644 index 00000000..568925b0 --- /dev/null +++ b/tests/modules/bamaligncleaner/test.yml @@ -0,0 +1,7 @@ +- name: bamaligncleaner + command: nextflow run ./tests/modules/bamaligncleaner -entry test_bamaligncleaner -c tests/config/nextflow.config + tags: + - bamaligncleaner + files: + - path: output/bamaligncleaner/test.bam + md5sum: 173cdb4c2713b77c528cac36ca2610fb From b2a6f5409efa6eb065c8186c606e68ff8004ba51 Mon Sep 17 00:00:00 2001 From: Mei Wu Date: Wed, 15 Sep 2021 11:02:28 +0200 Subject: [PATCH 063/314] modify markduplicates module (#681) * tested * updated meta config and test config --- modules/picard/markduplicates/main.nf | 7 +- modules/picard/markduplicates/meta.yml | 83 +++++++++++--------- tests/modules/picard/markduplicates/test.yml | 3 +- 3 files changed, 49 insertions(+), 44 deletions(-) diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index d20014bf..ac829515 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -23,6 +23,7 @@ process PICARD_MARKDUPLICATES { output: tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*.bai") , optional:true, emit: bai tuple val(meta), path("*.metrics.txt"), emit: metrics path "*.version.txt" , emit: version @@ -40,9 +41,9 @@ process PICARD_MARKDUPLICATES { -Xmx${avail_mem}g \\ MarkDuplicates \\ $options.args \\ - INPUT=$bam \\ - OUTPUT=${prefix}.bam \\ - METRICS_FILE=${prefix}.MarkDuplicates.metrics.txt + -I $bam \\ + -O ${prefix}.bam \\ + -M ${prefix}.MarkDuplicates.metrics.txt echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt """ diff --git a/modules/picard/markduplicates/meta.yml b/modules/picard/markduplicates/meta.yml index 6420ce9a..b651b3a0 100644 --- a/modules/picard/markduplicates/meta.yml +++ b/modules/picard/markduplicates/meta.yml @@ -1,46 +1,51 @@ name: picard_markduplicates description: Locate and tag duplicate reads in a BAM file keywords: - - markduplicates - - pcr - - duplicates - - bam - - sam - - cram + - markduplicates + - pcr + - duplicates + - bam + - sam + - cram tools: - - picard: - 
description: | - A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) - data and formats such as SAM/BAM/CRAM and VCF. - homepage: https://broadinstitute.github.io/picard/ - documentation: https://broadinstitute.github.io/picard/ + - picard: + description: | + A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) + data and formats such as SAM/BAM/CRAM and VCF. + homepage: https://broadinstitute.github.io/picard/ + documentation: https://broadinstitute.github.io/picard/ input: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - bam: - type: file - description: BAM file - pattern: "*.{bam}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.{bam}" output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - bam: - type: file - description: BAM file with duplicate reads marked/removed - pattern: "*.{bam}" - - metrics: - type: file - description: Duplicate metrics file generated by picard - pattern: "*.{metrics.txt}" - - version: - type: file - description: File containing software version - pattern: "*.{version.txt}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file with duplicate reads marked/removed + pattern: "*.{bam}" + - bai: + type: file + description: An optional BAM index file. If desired, --CREATE_INDEX must be passed as a flag + pattern: "*.{bai}" + - metrics: + type: file + description: Duplicate metrics file generated by picard + pattern: "*.{metrics.txt}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" authors: - - "@drpatelh" + - "@drpatelh" + - "@projectoriented" diff --git a/tests/modules/picard/markduplicates/test.yml b/tests/modules/picard/markduplicates/test.yml index 24f468ce..ffb385f3 100644 --- a/tests/modules/picard/markduplicates/test.yml +++ b/tests/modules/picard/markduplicates/test.yml @@ -6,8 +6,7 @@ files: - path: ./output/picard/test.MarkDuplicates.metrics.txt - path: ./output/picard/test.bam - md5sum: 3270bb142039e86aaf2ab83c540225d5 - + md5sum: b520ccdc3a9edf3c6a314983752881f2 - name: picard markduplicates on unsorted bam command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c tests/config/nextflow.config tags: From 3a8bfc1d332c62404b29a82bfcea41805a89bea9 Mon Sep 17 00:00:00 2001 From: Daniel Lundin Date: Wed, 15 Sep 2021 12:38:56 +0200 Subject: [PATCH 064/314] Get rid of meta map from bbmap/index (#700) Got rid of meta map from bbmap/index --- modules/bbmap/index/main.nf | 6 +++--- modules/bbmap/index/meta.yml | 10 ---------- tests/modules/bbmap/index/main.nf | 3 +-- tests/modules/bbmap/index/test.yml | 1 - 4 files changed, 4 insertions(+), 16 deletions(-) diff --git a/modules/bbmap/index/main.nf b/modules/bbmap/index/main.nf index 9e04881c..0e15b13f 100644 --- a/modules/bbmap/index/main.nf +++ b/modules/bbmap/index/main.nf @@ -9,7 +9,7 @@ process BBMAP_INDEX { label 'process_long' publishDir "${params.outdir}", mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + 
saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bbmap=38.92" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { @@ -19,10 +19,10 @@ process BBMAP_INDEX { } input: - tuple val(meta), path(fasta) + path fasta output: - tuple val(meta), path('ref') , emit: index + path 'ref' , emit: index path "*.version.txt" , emit: version script: diff --git a/modules/bbmap/index/meta.yml b/modules/bbmap/index/meta.yml index f55b1a26..a51a44fd 100644 --- a/modules/bbmap/index/meta.yml +++ b/modules/bbmap/index/meta.yml @@ -14,22 +14,12 @@ tools: licence: ['UC-LBL license (see package)'] input: - - meta: - type: map - description: | - Groovy Map containing optional parameters to bbmap.sh - e.g. [ id:'test', single_end:false ] - fasta: type: fasta description: fasta formatted file with nucleotide sequences pattern: "*.{fna,fa,fasta}" output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - version: type: file description: File containing software version diff --git a/tests/modules/bbmap/index/main.nf b/tests/modules/bbmap/index/main.nf index 3dcb63fd..0d912615 100644 --- a/tests/modules/bbmap/index/main.nf +++ b/tests/modules/bbmap/index/main.nf @@ -6,8 +6,7 @@ include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams workflow test_bbmap_index { - input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ] + input = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) BBMAP_INDEX ( input ) } diff --git a/tests/modules/bbmap/index/test.yml b/tests/modules/bbmap/index/test.yml index c4a25539..32684ad4 100644 --- a/tests/modules/bbmap/index/test.yml +++ b/tests/modules/bbmap/index/test.yml @@ -7,6 +7,5 @@ - bbmap/index files: - path: output/bbmap/ref/genome/1/chr1.chrom.gz - md5sum: fc20702f3378836f06d4104b9cd88918 - path: output/bbmap/ref/index/1/chr1_index_k13_c15_b1.block md5sum: 9f0d9a7413c1d2c16cc24555b2381163 From 1bc3f6cf39dc0365cf1453814cfa0d89b0465de2 Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Wed, 15 Sep 2021 17:36:49 +0200 Subject: [PATCH 065/314] module: snp-dists (#697) * initial commit for draft [ci skip] * baseline code [ci skip] * update the test [ci skip] * finalize the description and all tests passing * accomodate optional args [ci skip] * fix the leftover todo statement * Update modules/snpdists/main.nf Co-authored-by: Harshil Patel --- modules/snpdists/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/snpdists/main.nf | 38 ++++++++++++++++++ modules/snpdists/meta.yml | 41 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/snpdists/main.nf | 13 +++++++ tests/modules/snpdists/test.yml | 7 ++++ 6 files changed, 171 insertions(+) create mode 100644 modules/snpdists/functions.nf create mode 100644 modules/snpdists/main.nf create mode 100644 modules/snpdists/meta.yml create mode 100644 tests/modules/snpdists/main.nf create mode 100644 tests/modules/snpdists/test.yml diff --git a/modules/snpdists/functions.nf b/modules/snpdists/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/snpdists/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name 
using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/snpdists/main.nf b/modules/snpdists/main.nf new file mode 100644 index 00000000..c103bb33 --- /dev/null +++ b/modules/snpdists/main.nf @@ -0,0 +1,38 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SNPDISTS { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::snp-dists=0.8.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/snp-dists:0.8.2--h5bf99c6_0" + } else { + container "quay.io/biocontainers/snp-dists:0.8.2--h5bf99c6_0" + } + + input: + tuple val(meta), path(alignment) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + snp-dists \\ + $options.args \\ + $alignment > ${prefix}.tsv + + echo \$(snp-dists -v 2>&1) | sed 's/snp-dists //;' > ${software}.version.txt + """ +} diff --git a/modules/snpdists/meta.yml b/modules/snpdists/meta.yml new file mode 100644 index 00000000..590d034a --- /dev/null +++ b/modules/snpdists/meta.yml @@ -0,0 +1,41 @@ +name: snpdists +description: Pairwise SNP distance matrix from a FASTA sequence alignment +keywords: + - snp-dists + - distance-matrix +tools: + - snpdists: + description: Convert a FASTA alignment to SNP distance matrix + homepage: https://github.com/tseemann/snp-dists + documentation: https://github.com/tseemann/snp-dists + tool_dev_url: https://github.com/tseemann/snp-dists + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - alignment: + type: file + description: The input FASTA sequence alignment file + pattern: "*.{fasta,fasta.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - tsv: + type: file + description: The output TSV file containing SNP distance matrix + pattern: "*.tsv" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6d1f5dac..48a68a64 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -831,6 +831,10 @@ shovill: - modules/shovill/** - tests/modules/shovill/** +snpdists: + - modules/snpdists/** + - tests/modules/snpdists/** + snpeff: - modules/snpeff/** - tests/modules/snpeff/** diff --git a/tests/modules/snpdists/main.nf b/tests/modules/snpdists/main.nf new file mode 100644 index 00000000..8a29effa --- /dev/null +++ b/tests/modules/snpdists/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SNPDISTS } from '../../../modules/snpdists/main.nf' addParams( options: [:] ) + +workflow test_snpdists { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['informative_sites_fas'], checkIfExists: true) ] + + SNPDISTS ( input ) +} diff --git a/tests/modules/snpdists/test.yml b/tests/modules/snpdists/test.yml new file mode 100644 index 00000000..d140ce6e --- /dev/null +++ b/tests/modules/snpdists/test.yml @@ -0,0 +1,7 @@ +- name: snpdists + command: nextflow run ./tests/modules/snpdists -entry test_snpdists -c tests/config/nextflow.config + tags: + - snpdists + files: + - path: output/snpdists/test.tsv + md5sum: 0018e5ec43990eb16abe2411fff4e47e From 26fc6179833a63d44c0f74336e472eaa0f0ae7ac Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Wed, 15 Sep 2021 17:37:46 +0200 Subject: [PATCH 066/314] Update: MALT_RUN (#706) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Bump MALT versions for MALT-run Co-authored-by: Harshil Patel --- modules/malt/run/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf index 7b327d6e..8add081c 100644 --- a/modules/malt/run/main.nf +++ b/modules/malt/run/main.nf @@ -11,11 +11,11 @@ process MALT_RUN { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::malt=0.5.2" : null) + conda (params.enable_conda ? "bioconda::malt=0.53" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/malt:0.5.2--0" + container "https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0" } else { - container "quay.io/biocontainers/malt:0.5.2--0" + container "quay.io/biocontainers/malt:0.53--hdfd78af_0" } input: From 561f16fe7430eefce5b668afccde7c20790dda0f Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 15 Sep 2021 17:46:29 +0200 Subject: [PATCH 067/314] update: MALT build version bump (#699) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Bump versions Co-authored-by: Harshil Patel --- modules/malt/build/main.nf | 8 +++----- tests/modules/malt/build_test/test.yml | 8 ++++---- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/modules/malt/build/main.nf b/modules/malt/build/main.nf index efadf9cf..a7e3751b 100644 --- a/modules/malt/build/main.nf +++ b/modules/malt/build/main.nf @@ -11,13 +11,11 @@ process MALT_BUILD { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - // Do not **auto-bump** due to problem with change of version numbering between 0.4.1 and 0.5.2 - // (originally 0.4.1 was listed as 0.41, so is always selected as 'latest' even though it is not!) - conda (params.enable_conda ? "bioconda::malt=0.5.2" : null) + conda (params.enable_conda ? 
"bioconda::malt=0.53" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/malt:0.5.2--0" + container "https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0" } else { - container "quay.io/biocontainers/malt:0.5.2--0" + container "quay.io/biocontainers/malt:0.53--hdfd78af_0" } input: diff --git a/tests/modules/malt/build_test/test.yml b/tests/modules/malt/build_test/test.yml index 23ca4550..c3ed4b8f 100644 --- a/tests/modules/malt/build_test/test.yml +++ b/tests/modules/malt/build_test/test.yml @@ -17,9 +17,9 @@ - path: output/malt/malt_index/taxonomy.idx md5sum: bb335e7c378a5bd85761b6eeed16d984 - path: output/malt/malt_index/taxonomy.map - md5sum: ae2ea08b2119eba932a9cbcd9e634917 + md5sum: f6b05bbab2149e388cb769098e14d433 - path: output/malt/malt_index/taxonomy.tre - md5sum: 511ec8ff4fd8aaa20d59b5a91ed4e852 + md5sum: bde26a1fff5c63d3046d3863607a1e70 - name: malt build gff command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build_gff -c tests/config/nextflow.config tags: @@ -43,6 +43,6 @@ - path: output/malt/malt_index/taxonomy.idx md5sum: bb335e7c378a5bd85761b6eeed16d984 - path: output/malt/malt_index/taxonomy.map - md5sum: ae2ea08b2119eba932a9cbcd9e634917 + md5sum: f6b05bbab2149e388cb769098e14d433 - path: output/malt/malt_index/taxonomy.tre - md5sum: 511ec8ff4fd8aaa20d59b5a91ed4e852 + md5sum: bde26a1fff5c63d3046d3863607a1e70 From 58134cb92947e6ffb151e9763d9eb43886315cb6 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Wed, 15 Sep 2021 18:20:55 +0200 Subject: [PATCH 068/314] Chromap Module (#659) * Initialise chromap module * Revert "Initialise chromap module" This reverts commit 47c67ae231a6f221ef5b9b7b444b583b5406852b. * Remake chromap base files with new layout * Copy chromap * Copy index * Add compression * Update padding * Update container * Update chromap input test data * Add chromap chromap tests * Add padding * Update comment * update yaml file * Remove TODOs * Add fasta input to yaml * Update YAML * Remove comment, update container * Remove comments * Import Chromap index * Update test.yml * Fix read input * Update test.yml * Add bcftools/concat module. (#641) * draft for bcftools modules [ci skip] * initial test for bcftools concat * Update the params for testing * fix tests * Accomodate code review [ci skip] Co-authored-by: James A. Fellows Yates * Update the meta file and open PR for review * Update the keyword * Update the tags for module [ci skip[ * add threads Co-authored-by: James A. Fellows Yates * add module for dragonflye (#633) * add module for dragonflye * fix tests for dragonflye * Update test.yml * Update meta.yml * Update main.nf * Update main.nf * Update modules/dragonflye/meta.yml Co-authored-by: Gregor Sturm * update typos. change quote from ' to ". (#652) * Add bcftools/norm module (#655) * Initial draft [ci skip] * trigger first test * update output file path * Tests passing * finishing touches for meta.yml and update checksum * tweak checksum * add threads to the module * skip version info for matching test md5sum [ci skip] * Add ref fasta and finalize the module Co-authored-by: Gregor Sturm * Expansionhunter (#666) Please enter the commit message for your changes. Lines starting * adds expansionhunter module Co-authored-by: Maxime U. 
Garcia * Update test.yml (#668) * Specify in guidelines one should split CPUs when module has n > 1 tool (#660) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Describe CPU splitting * Update README.md Co-authored-by: Gregor Sturm * More CPU examples Co-authored-by: Gregor Sturm * Add dsh-bio export-segments module (#631) Co-authored-by: Gregor Sturm * update: `BWA/ALN` (#653) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Remove reads from output channel following module guidelines. Should do a .join() based on $meta, to reassociate. Co-authored-by: Gregor Sturm * Update seqwish reported version to match bioconda version. (#678) * Bbmap index (#683) BBMap index module * Initialise chromap module * Revert "Initialise chromap module" This reverts commit 47c67ae231a6f221ef5b9b7b444b583b5406852b. * Remove unnecessary files * Remove unnecessary files * Update modules/chromap/index/main.nf Co-authored-by: Harshil Patel * Update modules/chromap/index/main.nf Co-authored-by: Harshil Patel * Update modules/chromap/chromap/main.nf Co-authored-by: Harshil Patel * Update tests/modules/chromap/chromap/main.nf Co-authored-by: Harshil Patel * Update tests/modules/chromap/chromap/main.nf Co-authored-by: Harshil Patel * Update tests/modules/chromap/chromap/main.nf Co-authored-by: Harshil Patel * Update modules/chromap/index/main.nf Co-authored-by: Harshil Patel * Remove pytest_software.yml * Apply suggestions from code review Co-authored-by: Abhinav Sharma Co-authored-by: James A. Fellows Yates Co-authored-by: Robert A. Petit III Co-authored-by: Gregor Sturm Co-authored-by: JIANHONG OU Co-authored-by: Anders Jemt Co-authored-by: Maxime U. 
Garcia Co-authored-by: Michael L Heuer Co-authored-by: Daniel Lundin Co-authored-by: Harshil Patel --- modules/chromap/chromap/functions.nf | 68 +++++++++++++++++++ modules/chromap/chromap/main.nf | 93 ++++++++++++++++++++++++++ modules/chromap/chromap/meta.yml | 88 ++++++++++++++++++++++++ modules/chromap/index/functions.nf | 68 +++++++++++++++++++ modules/chromap/index/main.nf | 40 +++++++++++ modules/chromap/index/meta.yml | 33 +++++++++ tests/config/pytest_modules.yml | 8 +++ tests/modules/chromap/chromap/main.nf | 79 ++++++++++++++++++++++ tests/modules/chromap/chromap/test.yml | 32 +++++++++ tests/modules/chromap/index/main.nf | 12 ++++ tests/modules/chromap/index/test.yml | 8 +++ 11 files changed, 529 insertions(+) create mode 100644 modules/chromap/chromap/functions.nf create mode 100644 modules/chromap/chromap/main.nf create mode 100644 modules/chromap/chromap/meta.yml create mode 100644 modules/chromap/index/functions.nf create mode 100644 modules/chromap/index/main.nf create mode 100644 modules/chromap/index/meta.yml create mode 100644 tests/modules/chromap/chromap/main.nf create mode 100644 tests/modules/chromap/chromap/test.yml create mode 100644 tests/modules/chromap/index/main.nf create mode 100644 tests/modules/chromap/index/test.yml diff --git a/modules/chromap/chromap/functions.nf b/modules/chromap/chromap/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/chromap/chromap/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf new file mode 100644 index 00000000..330ba60e --- /dev/null +++ b/modules/chromap/chromap/main.nf @@ -0,0 +1,93 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = 0.1 // No version information printed + +process CHROMAP_CHROMAP { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::chromap=0.1 bioconda::samtools=1.13" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" + } else { + container "quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" + } + + input: + tuple val(meta), path(reads) + path fasta + path index + path barcodes + path whitelist + path chr_order + path pairs_chr_order + + output: + tuple val(meta), path("*.bed.gz") , optional:true, emit: bed + tuple val(meta), path("*.bam") , optional:true, emit: bam + tuple val(meta), path("*.tagAlign.gz"), optional:true, emit: tagAlign + tuple val(meta), path("*.pairs.gz") , optional:true, emit: pairs + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = options.args.tokenize() + + def file_extension = options.args.contains("--SAM")? 'sam' : + options.args.contains("--TagAlign")? 'tagAlign' : + options.args.contains("--pairs")? 
'pairs' : 'bed' + if (barcodes) { + args << "-b ${barcodes.join(',')}" + if (whitelist) { + args << "--barcode-whitelist $whitelist" + } + } + if (chr_order) { + args << "--chr-order $chr_order" + } + if (pairs_chr_order){ + args << "--pairs-natural-chr-order $pairs_chr_order" + } + def compression_cmds = """ + gzip ${prefix}.${file_extension} + """ + if (options.args.contains("--SAM")) { + compression_cmds = """ + samtools view $options.args2 -@ ${task.cpus} -bh \\ + -o ${prefix}.bam ${prefix}.${file_extension} + rm ${prefix}.${file_extension} + + samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + """ + } + if (meta.single_end) { + """ + chromap ${args.join(' ')} \\ + -t $task.cpus \\ + -x $index \\ + -r $fasta \\ + -1 ${reads.join(',')} \\ + -o ${prefix}.${file_extension} + echo "$VERSION" > ${software}.version.txt + """ + compression_cmds + } else { + """ + chromap ${args.join(' ')} \\ + -t $task.cpus \\ + -x $index \\ + -r $fasta \\ + -1 ${reads[0]} \\ + -2 ${reads[1]} \\ + -o ${prefix}.${file_extension} + echo "$VERSION" > ${software}.version.txt + """ + compression_cmds + } +} diff --git a/modules/chromap/chromap/meta.yml b/modules/chromap/chromap/meta.yml new file mode 100644 index 00000000..c088ab35 --- /dev/null +++ b/modules/chromap/chromap/meta.yml @@ -0,0 +1,88 @@ +name: chromap_chromap +description: | + Performs preprocessing and alignment of chromatin fastq files to + fasta reference files using chromap. +keywords: + - chromap + - alignment + - map + - fastq + - bam + - sam + - hi-c + - atac-seq + - chip-seq + - trimming + - duplicate removal +tools: + - chromap: + description: Fast alignment and preprocessing of chromatin profiles + homepage: https://github.com/haowenz/chromap + documentation: https://github.com/haowenz/chromap + tool_dev_url: https://github.com/haowenz/chromap + doi: "" + licence: ['GPL v3'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - fasta: + type: file + description: | + The fasta reference file. + - index: + type: file + description: | + Chromap genome index files (*.index) + - barcodes: + type: file + description: | + Cell barcode files + - whitelist: + type: file + description: | + Cell barcode whitelist file + - chr_order: + type: file + description: | + Custom chromosome order + - pairs_chr_order: + type: file + description: | + Natural chromosome order for pairs flipping +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bed: + type: file + description: BED file + pattern: "*.bed.gz" + - bam: + type: file + description: BAM file + pattern: "*.bam" + - tagAlign: + type: file + description: tagAlign file + pattern: "*.tagAlign.gz" + - pairs: + type: file + description: pairs file + pattern: "*.pairs.gz" + +authors: + - "@mahesh-panchal" diff --git a/modules/chromap/index/functions.nf b/modules/chromap/index/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/chromap/index/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf new file mode 100644 index 00000000..c8a75935 --- /dev/null +++ b/modules/chromap/index/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = 0.1 // No version information printed + +process CHROMAP_INDEX { + tag '$fasta' + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? 
"bioconda::chromap=0.1 bioconda::samtools=1.13" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" + } else { + container "quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" + } + + input: + path fasta + + output: + path "*.index" , emit: index + path "*.version.txt", emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = fasta.baseName + """ + chromap -i $options.args \\ + -t $task.cpus \\ + -r $fasta \\ + -o ${prefix}.index + echo "$VERSION" > ${software}.version.txt + """ +} diff --git a/modules/chromap/index/meta.yml b/modules/chromap/index/meta.yml new file mode 100644 index 00000000..6a86fbeb --- /dev/null +++ b/modules/chromap/index/meta.yml @@ -0,0 +1,33 @@ +name: chromap_index +description: Indexes a fasta reference genome ready for chromatin profiling. +keywords: + - index + - fasta + - genome + - reference +tools: + - chromap: + description: Fast alignment and preprocessing of chromatin profiles + homepage: https://github.com/haowenz/chromap + documentation: https://github.com/haowenz/chromap + tool_dev_url: https://github.com/haowenz/chromap + doi: "" + licence: ['GPL v3'] + +input: + - fasta: + type: file + description: Fasta reference file. + +output: + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - index: + type: file + description: Index file of the reference genome + pattern: "*.{index}" + +authors: + - "@mahesh-panchal" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 48a68a64..ed194f80 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -226,6 +226,14 @@ cat/fastq: - modules/cat/fastq/** - tests/modules/cat/fastq/** +chromap/chromap: + - modules/chromap/chromap/** + - tests/modules/chromap/chromap/** + +chromap/index: + - modules/chromap/index/** + - tests/modules/chromap/index/** + cnvkit: - modules/cnvkit/** - tests/modules/cnvkit/** diff --git a/tests/modules/chromap/chromap/main.nf b/tests/modules/chromap/chromap/main.nf new file mode 100644 index 00000000..a5a1fc86 --- /dev/null +++ b/tests/modules/chromap/chromap/main.nf @@ -0,0 +1,79 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' addParams( options: [:] ) +include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_BASE } from '../../../../modules/chromap/chromap/main.nf' addParams( options: [:] ) +include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_SAM } from '../../../../modules/chromap/chromap/main.nf' addParams( options: ['args': '--SAM'] ) + +workflow test_chromap_chromap_single_end { + + // Test single-end and gz compressed output + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + CHROMAP_INDEX ( fasta ) + CHROMAP_CHROMAP_BASE ( + input, // meta + read data + fasta, // reference genome + CHROMAP_INDEX.out.index, // reference index + [], // barcode file + [], // barcode whitelist + [], // chromosome order file + [] // pairs chromosome order file + ) +} + +workflow test_chromap_chromap_paired_end { + + // Test paired-end and 
gz compressed output + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + CHROMAP_INDEX ( fasta ) + CHROMAP_CHROMAP_BASE ( + input, // meta + read data + fasta, // reference genome + CHROMAP_INDEX.out.index, // reference index + [], // barcode file + [], // barcode whitelist + [], // chromosome order file + [] // pairs chromosome order file + ) +} + +workflow test_chromap_chromap_paired_bam { + + // Test paired-end and bam output + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + CHROMAP_INDEX ( fasta ) + CHROMAP_CHROMAP_SAM ( + input, // meta + read data + fasta, // reference genome + CHROMAP_INDEX.out.index, // reference index + [], // barcode file + [], // barcode whitelist + [], // chromosome order file + [] // pairs chromosome order file + ) +} diff --git a/tests/modules/chromap/chromap/test.yml b/tests/modules/chromap/chromap/test.yml new file mode 100644 index 00000000..b2ce8137 --- /dev/null +++ b/tests/modules/chromap/chromap/test.yml @@ -0,0 +1,32 @@ +- name: chromap chromap test_chromap_chromap_single_end + command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_single_end -c tests/config/nextflow.config + tags: + - chromap/chromap + - chromap + files: + - path: output/chromap/genome.index + md5sum: f889d5f61d80823766af33277d27d386 + - path: output/chromap/test.bed.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + +- name: chromap chromap test_chromap_chromap_paired_end + command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c tests/config/nextflow.config + tags: + - chromap/chromap + - chromap + files: + - path: output/chromap/genome.index + md5sum: f889d5f61d80823766af33277d27d386 + - path: output/chromap/test.bed.gz + md5sum: cafd8fb21977f5ae69e9008b220ab169 + +- name: chromap chromap test_chromap_chromap_paired_bam + command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c tests/config/nextflow.config + tags: + - chromap/chromap + - chromap + files: + - path: output/chromap/genome.index + md5sum: f889d5f61d80823766af33277d27d386 + - path: output/chromap/test.bam + md5sum: bd1e3fe0f3abd1430ae191754f16a3ed diff --git a/tests/modules/chromap/index/main.nf b/tests/modules/chromap/index/main.nf new file mode 100644 index 00000000..997baba1 --- /dev/null +++ b/tests/modules/chromap/index/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' addParams( options: [:] ) + +workflow test_chromap_index { + + input = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + CHROMAP_INDEX ( input ) +} diff --git a/tests/modules/chromap/index/test.yml b/tests/modules/chromap/index/test.yml new file mode 100644 index 00000000..0a99a3a0 --- /dev/null +++ b/tests/modules/chromap/index/test.yml @@ -0,0 +1,8 @@ +- name: chromap index test_chromap_index + command: nextflow run 
tests/modules/chromap/index -entry test_chromap_index -c tests/config/nextflow.config + tags: + - chromap/index + - chromap + files: + - path: output/chromap/genome.index + md5sum: f889d5f61d80823766af33277d27d386 From 073bbf1b265beff4d45516148aa2b074ad4426d8 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Wed, 15 Sep 2021 10:24:11 -0600 Subject: [PATCH 069/314] add staphopia-sccmec module (#702) * add staphopia-sccmec module * add additional test * change output name * Update main.nf * Update test.yml Co-authored-by: Harshil Patel --- modules/staphopiasccmec/functions.nf | 68 ++++++++++++++++++++++++++ modules/staphopiasccmec/main.nf | 36 ++++++++++++++ modules/staphopiasccmec/meta.yml | 44 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/staphopiasccmec/main.nf | 22 +++++++++ tests/modules/staphopiasccmec/test.yml | 15 ++++++ 6 files changed, 189 insertions(+) create mode 100644 modules/staphopiasccmec/functions.nf create mode 100644 modules/staphopiasccmec/main.nf create mode 100644 modules/staphopiasccmec/meta.yml create mode 100644 tests/modules/staphopiasccmec/main.nf create mode 100644 tests/modules/staphopiasccmec/test.yml diff --git a/modules/staphopiasccmec/functions.nf b/modules/staphopiasccmec/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/staphopiasccmec/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/staphopiasccmec/main.nf b/modules/staphopiasccmec/main.nf new file mode 100644 index 00000000..85a61514 --- /dev/null +++ b/modules/staphopiasccmec/main.nf @@ -0,0 +1,36 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process STAPHOPIASCCMEC { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::staphopia-sccmec=1.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/staphopia-sccmec:1.0.0--hdfd78af_0" + } else { + container "quay.io/biocontainers/staphopia-sccmec:1.0.0--hdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + staphopia-sccmec --assembly $fasta $options.args > ${prefix}.tsv + + echo \$(staphopia-sccmec --version 2>&1) | sed 's/^.*staphopia-sccmec //' > ${software}.version.txt + """ +} diff --git a/modules/staphopiasccmec/meta.yml b/modules/staphopiasccmec/meta.yml new file mode 100644 index 00000000..2054c6b3 --- /dev/null +++ b/modules/staphopiasccmec/meta.yml @@ -0,0 +1,44 @@ +name: staphopiasccmec +description: Predicts Staphylococcus aureus SCCmec type based on primers. +keywords: + - amr + - fasta + - sccmec + +tools: + - staphopiasccmec: + description: Predicts Staphylococcus aureus SCCmec type based on primers. + homepage: https://staphopia.emory.edu + documentation: https://github.com/staphopia/staphopia-sccmec + tool_dev_url: https://github.com/staphopia/staphopia-sccmec + doi: https://doi.org/10.7717/peerj.5261 + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - tsv: + type: file + description: Tab-delimited results + pattern: "*.{tsv}" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index ed194f80..3105e590 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -855,6 +855,10 @@ spades: - modules/spades/** - tests/modules/spades/** +staphopiasccmec: + - modules/staphopiasccmec/** + - tests/modules/staphopiasccmec/** + star/align: - modules/star/align/** - tests/modules/star/align/** diff --git a/tests/modules/staphopiasccmec/main.nf b/tests/modules/staphopiasccmec/main.nf new file mode 100644 index 00000000..ec1b48e4 --- /dev/null +++ b/tests/modules/staphopiasccmec/main.nf @@ -0,0 +1,22 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { STAPHOPIASCCMEC } from '../../../modules/staphopiasccmec/main.nf' addParams( options: [:] ) +include { STAPHOPIASCCMEC as STAPHOPIASCCMEC_HAMMING } from '../../../modules/staphopiasccmec/main.nf' addParams( options: [args: '--hamming'] ) + +workflow test_staphopiasccmec { + + input = [ [ id:'test' ], + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + STAPHOPIASCCMEC ( input ) +} + +workflow test_staphopiasccmec_hamming { + + input = [ [ id:'test' ], + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + STAPHOPIASCCMEC_HAMMING ( input ) +} diff --git a/tests/modules/staphopiasccmec/test.yml b/tests/modules/staphopiasccmec/test.yml new file mode 100644 index 00000000..aadfec3e --- /dev/null +++ b/tests/modules/staphopiasccmec/test.yml @@ -0,0 +1,15 @@ +- name: staphopiasccmec test_staphopiasccmec + command: nextflow run tests/modules/staphopiasccmec -entry test_staphopiasccmec -c tests/config/nextflow.config + tags: + - staphopiasccmec + files: + - path: output/staphopiasccmec/test.tsv + md5sum: e6460d4164f3af5b290c5ccdb11343bf + +- name: staphopiasccmec test_staphopiasccmec_hamming + command: nextflow run tests/modules/staphopiasccmec -entry test_staphopiasccmec_hamming -c tests/config/nextflow.config + tags: + - staphopiasccmec + files: + - path: output/staphopiasccmec/test.tsv + md5sum: 164cda1b05b3b6814c1f0786d93ca070 From f4ec33ebc46bafcd60ca04ff6c962b523741c66b Mon Sep 17 00:00:00 2001 From: fbdtemme <69114541+fbdtemme@users.noreply.github.com> Date: Wed, 15 Sep 2021 18:56:12 +0200 Subject: [PATCH 070/314] Fix kallistobustools version extraction (#691) Co-authored-by: Harshil Patel --- modules/kallistobustools/count/main.nf | 2 +- modules/kallistobustools/ref/main.nf | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index 86172fc3..cb561a9d 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -51,6 +51,6 @@ process KALLISTOBUSTOOLS_COUNT { ${reads[0]} \\ ${reads[1]} - echo \$(kb 2>&1) | sed 's/^kb_python //; s/Usage.*\$//' > ${software}.version.txt + echo \$(kb 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//' > ${software}.version.txt """ } diff --git a/modules/kallistobustools/ref/main.nf b/modules/kallistobustools/ref/main.nf index ffcd643e..93935696 100644 --- a/modules/kallistobustools/ref/main.nf +++ b/modules/kallistobustools/ref/main.nf @@ -45,7 +45,7 @@ process KALLISTOBUSTOOLS_REF { $fasta \\ $gtf - echo \$(kb 2>&1) | sed 
's/^kb_python //; s/Usage.*\$//' > ${software}.version.txt + echo \$(kb 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//' > ${software}.version.txt """ } else { """ @@ -61,7 +61,7 @@ process KALLISTOBUSTOOLS_REF { $fasta \\ $gtf - echo \$(kb 2>&1) | sed 's/^kb_python //; s/Usage.*\$//' > ${software}.version.txt + echo \$(kb 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//' > ${software}.version.txt """ } } From e9c57083a0938c142e14ccf2b0275ed7724fe9a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Fern=C3=A1ndez=20Navarro?= Date: Wed, 15 Sep 2021 20:55:08 +0200 Subject: [PATCH 071/314] Fix bug in gffread (#686) * Fix bug in gffread that would cause it to fail if the input has .gtf as extension * Update test.yml * Update meta file * Update main.nf * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/gffread/main.nf | 6 +++++- modules/gffread/meta.yml | 16 +++------------- tests/modules/gffread/main.nf | 2 +- tests/modules/gffread/test.yml | 4 ++-- 4 files changed, 11 insertions(+), 17 deletions(-) diff --git a/modules/gffread/main.nf b/modules/gffread/main.nf index 6c5aecbc..6b0dd666 100644 --- a/modules/gffread/main.nf +++ b/modules/gffread/main.nf @@ -27,8 +27,12 @@ process GFFREAD { script: def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${gff.baseName}${options.suffix}" : "${gff.baseName}" """ - gffread $gff $options.args -o ${gff.baseName}.gtf + gffread \\ + $gff \\ + $options.args \\ + -o ${prefix}.gtf echo \$(gffread --version 2>&1) > ${software}.version.txt """ } diff --git a/modules/gffread/meta.yml b/modules/gffread/meta.yml index 63d281f5..af2abb6e 100644 --- a/modules/gffread/meta.yml +++ b/modules/gffread/meta.yml @@ -14,26 +14,16 @@ tools: licence: ['MIT'] input: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - gff: type: file - description: A GFF file in either the GFF3 or GFF2 format. - patter: "*.gff" + description: A reference file in either the GFF3, GFF2 or GTF format. + pattern: "*.{gff, gtf}" output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. 
[ id:'test', single_end:false ] - gtf: type: file description: GTF file resulting from the conversion of the GFF input file - pattern: "*.gtf" + pattern: "*.{gtf}" - version: type: file description: File containing software version diff --git a/tests/modules/gffread/main.nf b/tests/modules/gffread/main.nf index e7b5441a..87e95275 100644 --- a/tests/modules/gffread/main.nf +++ b/tests/modules/gffread/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GFFREAD } from '../../../modules/gffread/main.nf' addParams( options: [:] ) +include { GFFREAD } from '../../../modules/gffread/main.nf' addParams( options: [suffix: '.out'] ) workflow test_gffread { input = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) diff --git a/tests/modules/gffread/test.yml b/tests/modules/gffread/test.yml index 21a7bb6a..48096f1e 100644 --- a/tests/modules/gffread/test.yml +++ b/tests/modules/gffread/test.yml @@ -3,5 +3,5 @@ tags: - gffread files: - - path: ./output/gffread/genome.gtf - md5sum: f184f856b7fe3e159d21b052b5dd3954 + - path: ./output/gffread/genome.out.gtf + md5sum: fcbf5744ed806e47768bc456fa043263 From e904107c22d53b99c73114b7c1577924786f9d43 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Thu, 16 Sep 2021 10:23:07 +0200 Subject: [PATCH 072/314] Chromap patch (#712) * Initialise chromap module * Revert "Initialise chromap module" This reverts commit 47c67ae231a6f221ef5b9b7b444b583b5406852b. * Remove samtools version * Add space * Remove whitespace --- modules/chromap/chromap/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index 330ba60e..be60c6bd 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -64,8 +64,6 @@ process CHROMAP_CHROMAP { samtools view $options.args2 -@ ${task.cpus} -bh \\ -o ${prefix}.bam ${prefix}.${file_extension} rm ${prefix}.${file_extension} - - samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt """ } if (meta.single_end) { @@ -76,6 +74,7 @@ process CHROMAP_CHROMAP { -r $fasta \\ -1 ${reads.join(',')} \\ -o ${prefix}.${file_extension} + echo "$VERSION" > ${software}.version.txt """ + compression_cmds } else { @@ -87,6 +86,7 @@ process CHROMAP_CHROMAP { -1 ${reads[0]} \\ -2 ${reads[1]} \\ -o ${prefix}.${file_extension} + echo "$VERSION" > ${software}.version.txt """ + compression_cmds } From 18402890683bb34a575840827827db364a67b336 Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Thu, 16 Sep 2021 12:42:23 +0200 Subject: [PATCH 073/314] module for `agrvate` (#693) * initiate agrvate module * remove todos [ci skip] * remove todos and fix containers [ci skip] * ready for testing Co-authored-by: Harshil Patel --- modules/agrvate/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/agrvate/main.nf | 39 +++++++++++++++++++ modules/agrvate/meta.yml | 46 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/agrvate/main.nf | 13 +++++++ tests/modules/agrvate/test.yml | 7 ++++ 6 files changed, 177 insertions(+) create mode 100644 modules/agrvate/functions.nf create mode 100644 modules/agrvate/main.nf create mode 100644 modules/agrvate/meta.yml create mode 100644 tests/modules/agrvate/main.nf create mode 100644 tests/modules/agrvate/test.yml diff --git a/modules/agrvate/functions.nf b/modules/agrvate/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/agrvate/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions 
used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf new file mode 100644 index 00000000..bbbd9fa0 --- /dev/null +++ b/modules/agrvate/main.nf @@ -0,0 +1,39 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process AGRVATE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::agrvate=1.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/agrvate:1.0--hdfd78af_0" + } else { + container "quay.io/biocontainers/agrvate:1.0--hdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("${fasta.baseName}-results/${fasta.baseName}-summary.tab"), emit: summary + path "${fasta.baseName}-results" , emit: results_dir + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + agrvate \\ + $options.args \\ + -i $fasta + + echo \$(agrvate -v 2>&1) | sed 's/agrvate //;' > ${software}.version.txt + """ +} diff --git a/modules/agrvate/meta.yml b/modules/agrvate/meta.yml new file mode 100644 index 00000000..97aa5f58 --- /dev/null +++ b/modules/agrvate/meta.yml @@ -0,0 +1,46 @@ +name: agrvate +description: Rapid identification of Staphylococcus aureus agr locus type and agr operon variants +keywords: + - fasta + - virulence + - Staphylococcus aureus +tools: + - agrvate: + description: Rapid identification of Staphylococcus aureus agr locus type and agr operon variants. + homepage: https://github.com/VishnuRaghuram94/AgrVATE + documentation: https://github.com/VishnuRaghuram94/AgrVATE + tool_dev_url: https://github.com/VishnuRaghuram94/AgrVATE + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: A Staphylococcus aureus fasta file. + pattern: "*.fasta" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - summary: + type: file + description: A summary of the agrvate assessement + pattern: "*-summary.tab" + - results_dir: + type: directory + description: Results of the agrvate assessement + pattern: "*-results" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 3105e590..164e7d5d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -6,6 +6,10 @@ adapterremoval: - modules/adapterremoval/** - tests/modules/adapterremoval/** +agrvate: + - modules/agrvate/** + - tests/modules/agrvate/** + allelecounter: - modules/allelecounter/** - tests/modules/allelecounter/** diff --git a/tests/modules/agrvate/main.nf b/tests/modules/agrvate/main.nf new file mode 100644 index 00000000..58058fe3 --- /dev/null +++ b/tests/modules/agrvate/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { AGRVATE } from '../../../modules/agrvate/main.nf' addParams( options: ["args": "--mummer"] ) + +workflow test_agrvate { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + AGRVATE ( input ) +} diff --git a/tests/modules/agrvate/test.yml b/tests/modules/agrvate/test.yml new file mode 100644 index 00000000..ec413663 --- /dev/null +++ b/tests/modules/agrvate/test.yml @@ -0,0 +1,7 @@ +- name: agrvate + command: nextflow run ./tests/modules/agrvate -entry test_agrvate -c tests/config/nextflow.config + tags: + - agrvate + files: + - path: output/agrvate/genome-results/genome-summary.tab + md5sum: 781a9e5fc6ebc9f90ddfe8753d1633db From bbf268c5d34f0ccd5102ed3e3a445d493dc92894 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Thu, 16 Sep 2021 11:48:18 +0100 Subject: [PATCH 074/314] new module: pbccs (#688) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: First commit of pbccs module * 👌 IMPROVE: Remove option from command + rename output (ccs -> bam) * 👌 IMPROVE: Move .pbi output into report channel * 🐛FIX: Correct code after --rq option removal from command line module - module main.nf: Remove ramaining rq input channel - Test main.nf: 
Transfert rq into addParams - Test test.yml: Update md5sums * 🐛FIX: Repair additionnal option usage * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: CCS is run in parallel with --chunk option * 👌 IMPROVE: Add Pbindex in bam ouput channel * 👌 IMPROVE: Change label to process_low * 👌 IMPROVE: Define reports files names + add json version of txt report * 🐛 FIX: Add missing backslashes * 🐛 FIX: Add missing gz extension * 🐛 FIX: update ouput channel * 🐛 FIX: output file name * 👌 IMPROVE: .gitignore * 👌 IMPROVE: Update function.nf to last version * 👌 IMPROVE: Update saveAs in main.nf * 👌 IMPROVE: Add pbccs module * 🐛 FIX: Fix Broken test * 👌 IMPROVE: Update test_data.config * 🐛 FIX: Fix test * 👌 IMPROVE: Update path of test dataset files * 👌 IMPROVE: Remove useless index + Fix Typos * 📦 NEW: First commit of pbccs module * 👌 IMPROVE: Remove option from command + rename output (ccs -> bam) * 👌 IMPROVE: Move .pbi output into report channel * 🐛FIX: Correct code after --rq option removal from command line module - module main.nf: Remove ramaining rq input channel - Test main.nf: Transfert rq into addParams - Test test.yml: Update md5sums * 🐛FIX: Repair additionnal option usage * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: CCS is run in parallel with --chunk option * 👌 IMPROVE: Add Pbindex in bam ouput channel * 👌 IMPROVE: Change label to process_low * 👌 IMPROVE: Define reports files names + add json version of txt report * 🐛 FIX: Add missing backslashes * 🐛 FIX: Add missing gz extension * 🐛 FIX: update ouput channel * 🐛 FIX: output file name * 👌 IMPROVE: .gitignore * 👌 IMPROVE: Update function.nf to last version * 👌 IMPROVE: Update saveAs in main.nf * 👌 IMPROVE: Add pbccs module * 🐛 FIX: Fix Broken test * 👌 IMPROVE: Update test_data.config * 🐛 FIX: Fix test * 👌 IMPROVE: Update path of test dataset files * 👌 IMPROVE: Remove useless index + Fix Typos * 🐛 FIX: fill contains args * 👌 IMPROVE: One output => One Channel * 👌 IMPROVE: One input => One channel * 🐛 FIX: Update tests * 🐛 FIX: Remove TODOs from test.yaml * 👌 IMPROVE: Revert and keep bam and pbi together * 🐛 FIX: Remove old rq input from meta.yml * 👌 IMPROVE: Update test to match input channels Co-authored-by: James A. Fellows Yates --- .gitignore | 1 + modules/pbccs/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/pbccs/main.nf | 54 ++++++++++++++++++++++++++ modules/pbccs/meta.yml | 51 +++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 14 ++++++- tests/modules/pbccs/main.nf | 19 +++++++++ tests/modules/pbccs/test.yml | 15 ++++++++ 8 files changed, 225 insertions(+), 1 deletion(-) create mode 100644 modules/pbccs/functions.nf create mode 100644 modules/pbccs/main.nf create mode 100644 modules/pbccs/meta.yml create mode 100644 tests/modules/pbccs/main.nf create mode 100644 tests/modules/pbccs/test.yml diff --git a/.gitignore b/.gitignore index 2cbbac1a..71b9b179 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ output/ *.code-workspace .screenrc .*.sw? 
+tests/data/ diff --git a/modules/pbccs/functions.nf b/modules/pbccs/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/pbccs/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf new file mode 100644 index 00000000..95f9908c --- /dev/null +++ b/modules/pbccs/main.nf @@ -0,0 +1,54 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PBCCS { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::pbccs=6.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/pbccs:6.0.0--h9ee0642_2" + } else { + container "quay.io/biocontainers/pbccs:6.0.0--h9ee0642_2" + } + + input: + tuple val(meta), path(bam), path(pbi) + val chunk_num + val chunk_on + + output: + tuple val(meta), path("*.ccs.bam") , emit: bam + tuple val(meta), path("*.ccs.bam.pbi") , emit: pbi + tuple val(meta), path("*.ccs_report.txt" ) , emit: ccs_report_txt + tuple val(meta), path("*.ccs_report.json" ) , emit: ccs_report_json + tuple val(meta), path("*.zmw_metrics.json.gz"), emit: zmw_metrics + tuple val(meta), path("*.version.txt" ) , emit: version + + script: + def software = getSoftwareName(task.process) + // def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def ccs = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs.bam' + def report_txt = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs_report.txt' + def report_json = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs_report.json' + def zmw_metrics = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.zmw_metrics.json.gz' + """ + ccs \\ + $bam \\ + $ccs \\ + --report-file $report_txt \\ + --report-json $report_json \\ + --metrics-json $zmw_metrics \\ + --chunk $chunk_num/$chunk_on \\ + -j $task.cpus \\ + $options.args + + echo \$(ccs --version 2>&1) | grep -e 'commit' > ${software}.version.txt + """ +} diff --git a/modules/pbccs/meta.yml b/modules/pbccs/meta.yml new file mode 100644 index 00000000..8ed27abc --- /dev/null +++ b/modules/pbccs/meta.yml @@ -0,0 +1,51 @@ +name: pbccs +description: Pacbio ccs - Generate Higly Accurate Single-Molecule Consensus Reads +keywords: + - ccs +tools: + - pbccs: + description: pbccs - Generate Highly Accurate Single-Molecule Consensus Reads (HiFi Reads) + homepage: https://github.com/PacificBiosciences/pbbioconda + documentation: https://ccs.how/ + tool_dev_url: https://github.com/PacificBiosciences/ccs + doi: "" + licence: ['BSD-3-clause-Clear'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Raw subreads bam + pattern: "*.subreads.bam" + - pbi: + type: file + description: Pacbio BAM Index + pattern: "*.pbi" + - chunk_num: + -type: integer + -description: BAM part to process + - chunk_on: + -type: integer + -description: Total number of bam parts to process + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - css: + type: file + description: Consensus sequences + pattern: "*.ccs.bam" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 164e7d5d..f66f2d95 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -651,6 +651,10 @@ pangolin: - modules/pangolin/** - tests/modules/pangolin/** +pbccs: + - modules/pbccs/** + - tests/modules/pbccs/** + picard/collectmultiplemetrics: - modules/picard/collectmultiplemetrics/** - tests/modules/picard/collectmultiplemetrics/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index e03bb2a8..9854999d 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -100,6 +100,7 @@ params { genome_sizes = "${test_data_dir}/genomics/homo_sapiens/genome/genome.sizes" genome_bed = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed" transcriptome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/transcriptome.fasta" + genome2_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome2.fasta" dbsnp_146_hg38_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz" dbsnp_146_hg38_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz.tbi" @@ -168,7 +169,18 @@ params { test2_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test2.yak" } 'pacbio' { - test_hifi_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/pacbio/fastq/test_hifi.fastq.gz" + primers = "${test_data_dir}/genomics/homo_sapiens/pacbio/fasta/primers.fasta" + alz = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.bam" + alzpbi = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.bam.pbi" + ccs = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.bam" + lima = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.bam" + refine = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.bam" + cluster = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.bam" + singletons = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.bam" + aligned = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned.bam" + alignedbai = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned.bam.bai" + genemodel1 = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned_tc.bed" + genemodel2 = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned_tc.2.bed" } } } diff --git a/tests/modules/pbccs/main.nf b/tests/modules/pbccs/main.nf new file mode 100644 index 00000000..db7180e8 --- /dev/null +++ b/tests/modules/pbccs/main.nf @@ -0,0 +1,19 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PBCCS } from '../../../modules/pbccs/main.nf' addParams( options: [args:'--min-rq 0.9'] ) + +workflow test_pbccs { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['alz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['pacbio']['alzpbi'], checkIfExists: 
true) + ] + + chunk_num = 2 + chunk_on = 3 + + PBCCS ( input, chunk_num, chunk_on ) +} diff --git a/tests/modules/pbccs/test.yml b/tests/modules/pbccs/test.yml new file mode 100644 index 00000000..a90cb1ec --- /dev/null +++ b/tests/modules/pbccs/test.yml @@ -0,0 +1,15 @@ +- name: pbccs test_pbccs + command: nextflow run tests/modules/pbccs -entry test_pbccs -c tests/config/nextflow.config + tags: + - pbccs + files: + - path: output/pbccs/alz.2.ccs.bam + md5sum: b9c8093b362a07b575d52592b19fc909 + - path: output/pbccs/alz.2.ccs.bam.pbi + md5sum: 78d015230a8c957a24338581efda4e55 + - path: output/pbccs/alz.2.ccs_report.json + contains: ['Created by pbcopper v1.8.0'] + - path: output/pbccs/alz.2.ccs_report.txt + md5sum: db379e9299295679f4ca7eeb37011f08 + - path: output/pbccs/alz.2.zmw_metrics.json.gz + contains: ['zmws'] From d9dfbe9d9d06072c932fa8884b15987787df247f Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Thu, 16 Sep 2021 14:03:51 +0200 Subject: [PATCH 075/314] Add pydamage analyze module (#705) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * simplify PR to analyze subcommand Co-authored-by: James A. Fellows Yates --- modules/pydamage/analyze/functions.nf | 68 +++++++++++++++++++++++++ modules/pydamage/analyze/main.nf | 40 +++++++++++++++ modules/pydamage/analyze/meta.yml | 55 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/pydamage/analyze/main.nf | 14 +++++ tests/modules/pydamage/analyze/test.yml | 8 +++ 6 files changed, 189 insertions(+) create mode 100644 modules/pydamage/analyze/functions.nf create mode 100644 modules/pydamage/analyze/main.nf create mode 100644 modules/pydamage/analyze/meta.yml create mode 100644 tests/modules/pydamage/analyze/main.nf create mode 100644 tests/modules/pydamage/analyze/test.yml diff --git a/modules/pydamage/analyze/functions.nf b/modules/pydamage/analyze/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/pydamage/analyze/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + 
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf new file mode 100644 index 00000000..5a2f331b --- /dev/null +++ b/modules/pydamage/analyze/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PYDAMAGE_ANALYZE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::pydamage=0.62" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0" + } + + input: + tuple val(meta), path(bam), path(bai) + + output: + tuple val(meta), path("pydamage_results/pydamage_results.csv"), emit: csv + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + pydamage \\ + analyze \\ + $options.args \\ + -p $task.cpus \\ + $bam + + echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g' > ${software}.version.txt + """ +} diff --git a/modules/pydamage/analyze/meta.yml b/modules/pydamage/analyze/meta.yml new file mode 100644 index 00000000..3da9f793 --- /dev/null +++ b/modules/pydamage/analyze/meta.yml @@ -0,0 +1,55 @@ +name: pydamage_analyze +description: Damage parameter estimation for ancient DNA +keywords: + - ancient DNA + - aDNA + - de novo assembly + - filtering + - damage + - deamination + - miscoding lesions + - C to T + - palaeogenomics + - archaeogenomics + - palaeogenetics + - archaeogenetics +tools: + - pydamage: + description: Damage parameter estimation for ancient DNA + homepage: https://github.com/maxibor/pydamage + documentation: https://pydamage.readthedocs.io/ + tool_dev_url: https://github.com/maxibor/pydamage + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - bai: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - csv: + type: file + description: PyDamage results as csv files + pattern: "*.csv" + +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index f66f2d95..1e33a9d9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -703,6 +703,10 @@ pycoqc: - modules/pycoqc/** - tests/modules/pycoqc/** +pydamage/analyze: + - modules/pydamage/analyze/** + - tests/modules/pydamage/analyze/** + qcat: - modules/qcat/** - tests/modules/qcat/** diff --git a/tests/modules/pydamage/analyze/main.nf b/tests/modules/pydamage/analyze/main.nf new file mode 100644 index 00000000..ddf0b27a --- /dev/null +++ b/tests/modules/pydamage/analyze/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' addParams( options: [:] ) + +workflow test_pydamage { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + PYDAMAGE_ANALYZE ( input ) +} diff --git a/tests/modules/pydamage/analyze/test.yml b/tests/modules/pydamage/analyze/test.yml new file mode 100644 index 00000000..c54e64a4 --- /dev/null +++ b/tests/modules/pydamage/analyze/test.yml @@ -0,0 +1,8 @@ +- name: test_pydamage_analyze + command: nextflow run tests/modules/pydamage/analyze -entry test_pydamage -c tests/config/nextflow.config + tags: + - pydamage + - pydamage/analyze + files: + - path: output/pydamage/pydamage_results/pydamage_results.csv + md5sum: 6847e0d5aa6dba85bbd2dd509772b7a0 From 7e45cbf4d1e48023cb40a2e047854fb57e1c9832 Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Thu, 16 Sep 2021 15:05:49 +0200 Subject: [PATCH 076/314] Add PyDamage filter (#713) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates Co-authored-by: James A. 
Fellows Yates --- modules/pydamage/filter/functions.nf | 68 ++++++++++++++++++++++++++ modules/pydamage/filter/main.nf | 40 +++++++++++++++ modules/pydamage/filter/meta.yml | 51 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/pydamage/filter/main.nf | 16 ++++++ tests/modules/pydamage/filter/test.yml | 10 ++++ 6 files changed, 189 insertions(+) create mode 100644 modules/pydamage/filter/functions.nf create mode 100644 modules/pydamage/filter/main.nf create mode 100644 modules/pydamage/filter/meta.yml create mode 100644 tests/modules/pydamage/filter/main.nf create mode 100644 tests/modules/pydamage/filter/test.yml diff --git a/modules/pydamage/filter/functions.nf b/modules/pydamage/filter/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/pydamage/filter/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf new file mode 100644 index 00000000..0010a7e0 --- /dev/null +++ b/modules/pydamage/filter/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PYDAMAGE_FILTER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::pydamage=0.62" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0" + } + + input: + tuple val(meta), path(csv) + + output: + tuple val(meta), path("pydamage_results/pydamage_filtered_results.csv"), emit: csv + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + + pydamage \\ + filter \\ + $options.args \\ + $csv + + echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g' > ${software}.version.txt + """ +} diff --git a/modules/pydamage/filter/meta.yml b/modules/pydamage/filter/meta.yml new file mode 100644 index 00000000..0870636b --- /dev/null +++ b/modules/pydamage/filter/meta.yml @@ -0,0 +1,51 @@ +name: pydamage_filter +description: Damage parameter estimation for ancient DNA +keywords: + - ancient DNA + - aDNA + - de novo assembly + - filtering + - damage + - deamination + - miscoding lesions + - C to T + - palaeogenomics + - archaeogenomics + - palaeogenetics + - archaeogenetics +tools: + - pydamage: + description: Damage parameter estimation for ancient DNA + homepage: https://github.com/maxibor/pydamage + documentation: https://pydamage.readthedocs.io/ + tool_dev_url: https://github.com/maxibor/pydamage + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - csv: + type: file + description: csv file from pydamage analyze + pattern: "*.csv" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - csv: + type: file + description: PyDamage filtered results as csv file + pattern: "*.csv" + +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 1e33a9d9..fdcbbf94 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -707,6 +707,10 @@ pydamage/analyze: - modules/pydamage/analyze/** - tests/modules/pydamage/analyze/** +pydamage/filter: + - modules/pydamage/filter/** + - tests/modules/pydamage/filter/** + qcat: - modules/qcat/** - tests/modules/qcat/** diff --git a/tests/modules/pydamage/filter/main.nf b/tests/modules/pydamage/filter/main.nf new file mode 100644 index 00000000..03e90408 --- /dev/null +++ b/tests/modules/pydamage/filter/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' addParams( options: [:] ) +include { PYDAMAGE_FILTER } from '../../../../modules/pydamage/filter/main.nf' addParams( options: [:] ) + +workflow test_pydamage { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + PYDAMAGE_ANALYZE ( input ) + PYDAMAGE_FILTER (PYDAMAGE_ANALYZE.out.csv) +} diff --git a/tests/modules/pydamage/filter/test.yml b/tests/modules/pydamage/filter/test.yml new file mode 100644 index 00000000..e131d505 --- /dev/null +++ b/tests/modules/pydamage/filter/test.yml @@ -0,0 +1,10 @@ +- name: test_pydamage_filter + command: nextflow run tests/modules/pydamage/filter -entry test_pydamage -c tests/config/nextflow.config + tags: + - pydamage + - pydamage/filter + files: + - path: output/pydamage/pydamage_results/pydamage_filtered_results.csv + md5sum: 9f297233cf4932d7d7e52cc72d4727dc + - path: output/pydamage/pydamage_results/pydamage_results.csv + md5sum: 6847e0d5aa6dba85bbd2dd509772b7a0 From 3c4eaec52b293970b536eee0b59c706be6df83b2 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Fri, 17 Sep 2021 09:56:56 +0100 Subject: [PATCH 077/314] Update for mutect2: add output channel for stats file (#716) * added output channel for stats file, updated meta yml with description and test yml with check for stats file * Update modules/gatk4/mutect2/main.nf Co-authored-by: GCJMackenzie Co-authored-by: Harshil Patel --- modules/gatk4/mutect2/main.nf | 7 ++++--- modules/gatk4/mutect2/meta.yml | 4 ++++ tests/modules/gatk4/mutect2/test.yml | 6 ++++++ 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 2fcdbbd7..6ab9e1c7 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -31,10 +31,11 @@ process GATK4_MUTECT2 { path panel_of_normals_idx output: - tuple val(meta), path("*.vcf.gz"), emit: vcf - tuple val(meta), path("*.tbi") , emit: tbi + tuple val(meta), path("*.vcf.gz") , emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi + tuple val(meta), path("*.stats") , emit: stats tuple val(meta), path("*.f1r2.tar.gz"), optional:true, emit: f1r2 - path "*.version.txt" , emit: version + path "*.version.txt" , emit: version script: def software = getSoftwareName(task.process) diff --git 
a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 8ba803df..7833d694 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -76,6 +76,10 @@ output: type: file description: Index of vcf file pattern: "*vcf.gz.tbi" + - stats: + type: file + description: Stats file that pairs with output vcf file + pattern: "*vcf.gz.stats" - f1r2: type: file description: file containing information to be passed to LearnReadOrientationModel (only outputted when tumor_normal_pair mode is run) diff --git a/tests/modules/gatk4/mutect2/test.yml b/tests/modules/gatk4/mutect2/test.yml index 03205266..16f39875 100644 --- a/tests/modules/gatk4/mutect2/test.yml +++ b/tests/modules/gatk4/mutect2/test.yml @@ -6,6 +6,8 @@ files: - path: output/gatk4/test.f1r2.tar.gz - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.stats + md5sum: 6ecb874e6a95aa48233587b876c2a7a9 - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_tumor_single @@ -15,6 +17,8 @@ - gatk4/mutect2 files: - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.stats + md5sum: e7ef613f7d158b8a0adf44abe5db2029 - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_generate_pon @@ -24,4 +28,6 @@ - gatk4/mutect2 files: - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.stats + md5sum: 4f77301a125913170b8e9e7828b4ca3f - path: output/gatk4/test.vcf.gz.tbi From 97b803a8a7ed1a454cd85bd604a49637effca787 Mon Sep 17 00:00:00 2001 From: Daniel Lundin Date: Mon, 20 Sep 2021 11:27:34 +0200 Subject: [PATCH 078/314] Module to map reads with BBMap (#717) --- modules/bbmap/align/functions.nf | 68 ++++++++++++++++++++++++++++++ modules/bbmap/align/main.nf | 59 ++++++++++++++++++++++++++ modules/bbmap/align/meta.yml | 52 +++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bbmap/align/main.nf | 59 ++++++++++++++++++++++++++ tests/modules/bbmap/align/test.yml | 35 +++++++++++++++ 6 files changed, 277 insertions(+) create mode 100644 modules/bbmap/align/functions.nf create mode 100644 modules/bbmap/align/main.nf create mode 100644 modules/bbmap/align/meta.yml create mode 100644 tests/modules/bbmap/align/main.nf create mode 100644 tests/modules/bbmap/align/test.yml diff --git a/modules/bbmap/align/functions.nf b/modules/bbmap/align/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/bbmap/align/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + 
+// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf new file mode 100644 index 00000000..eca45ddb --- /dev/null +++ b/modules/bbmap/align/main.nf @@ -0,0 +1,59 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BBMAP_ALIGN { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bbmap=38.92 bioconda::samtools=1.13 pigz=2.6" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0" + } else { + container "quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0" + } + + input: + tuple val(meta), path(fastq) + path ref + + output: + tuple val(meta), path("*.bam"), emit: bam + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + input = meta.single_end ? "in=${fastq}" : "in=${fastq[0]} in2=${fastq[1]}" + + // Set the db variable to reflect the three possible types of reference input: 1) directory + // named 'ref', 2) directory named something else (containg a 'ref' subdir) or 3) a sequence + // file in fasta format + if ( ref.isDirectory() ) { + if ( ref ==~ /(.\/)?ref\/?/ ) { + db = '' + } else { + db = "path=${ref}" + } + } else { + db = "ref=${ref}" + } + + """ + bbmap.sh \\ + $db \\ + $input \\ + out=${prefix}.bam \\ + $options.args \\ + threads=$task.cpus \\ + -Xmx${task.memory.toGiga()}g + + echo \$(bbversion.sh) > ${software}.version.txt + """ +} diff --git a/modules/bbmap/align/meta.yml b/modules/bbmap/align/meta.yml new file mode 100644 index 00000000..b008ea0f --- /dev/null +++ b/modules/bbmap/align/meta.yml @@ -0,0 +1,52 @@ +name: bbmap_align +description: write your description here +keywords: + - align + - map + - fasta + - genome + - reference +tools: + - bbmap: + description: BBMap is a short read aligner, as well as various other bioinformatic tools. 
+ homepage: https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/ + documentation: https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/ + tool_dev_url: None + doi: "" + licence: ['UC-LBL license (see package)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fastq: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - ref: + type: file + description: | + Either "ref" a directory containing an index, the name of another directory + with a "ref" subdirectory containing an index or the name of a fasta formatted + nucleotide file containg the reference to map to. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bam: + type: file + description: BAM file + pattern: "*.{bam}" + +authors: + - "@erikrikarddaniel" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index fdcbbf94..cf8f731c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -34,6 +34,10 @@ bandage/image: - modules/bandage/image/** - tests/modules/bandage/image/** +bbmap/align: + - modules/bbmap/align/** + - tests/modules/bbmap/align/** + bbmap/bbduk: - modules/bbmap/bbduk/** - tests/modules/bbmap/bbduk/** diff --git a/tests/modules/bbmap/align/main.nf b/tests/modules/bbmap/align/main.nf new file mode 100644 index 00000000..248e3975 --- /dev/null +++ b/tests/modules/bbmap/align/main.nf @@ -0,0 +1,59 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams( options: [:] ) +include { BBMAP_ALIGN } from '../../../../modules/bbmap/align/main.nf' addParams( options: [:] ) +include { BBMAP_ALIGN as BBMAP_ALIGN_PIGZ } from '../../../../modules/bbmap/align/main.nf' addParams( options: [args: "unpigz=t" ] ) + +workflow test_bbmap_align_paired_end_fasta_ref { + + input = [ [ id:'test', single_end:false ], // meta map + [ + file( params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BBMAP_ALIGN ( input, fasta ) +} + +workflow test_bbmap_align_paired_end_index_ref { + + input = [ [ id:'test', single_end:false ], // meta map + [ + file( params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BBMAP_INDEX ( fasta ) + BBMAP_ALIGN ( input, BBMAP_INDEX.out.index ) +} + +workflow test_bbmap_align_single_end_index_ref { + + input = [ [ id:'test', single_end:true ], // meta map + file( params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BBMAP_INDEX ( fasta ) + BBMAP_ALIGN ( input, BBMAP_INDEX.out.index ) +} + +workflow test_bbmap_align_paired_end_index_ref_pigz { + + input = [ [ id:'test', single_end:false ], // meta map + [ + file( 
params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BBMAP_INDEX ( fasta ) + BBMAP_ALIGN_PIGZ ( input, BBMAP_INDEX.out.index ) +} diff --git a/tests/modules/bbmap/align/test.yml b/tests/modules/bbmap/align/test.yml new file mode 100644 index 00000000..0fcc8ce9 --- /dev/null +++ b/tests/modules/bbmap/align/test.yml @@ -0,0 +1,35 @@ +- name: bbmap align paired end fasta ref + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_fasta_ref -c tests/config/nextflow.config + tags: + - bbmap + - bbmap/align + files: + - path: output/bbmap/test.bam + md5sum: e0ec7f1eec537acf146fac1cbdd868d1 + +- name: bbmap align paired end index ref + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c tests/config/nextflow.config + tags: + - bbmap + - bbmap/align + files: + - path: output/bbmap/test.bam + md5sum: 345a72a0d58366d75dd263b107caa460 + +- name: bbmap align single end index ref + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c tests/config/nextflow.config + tags: + - bbmap + - bbmap/align + files: + - path: output/bbmap/test.bam + md5sum: 95f690636581ce9b27cf8568c715ae4d + +- name: bbmap align paired end index ref pigz + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c tests/config/nextflow.config + tags: + - bbmap + - bbmap/align + files: + - path: output/bbmap/test.bam + md5sum: 441c4f196b9a82c7b224903538064308 From 3a0cd9d75a68e9b6708c0be29bc5285cedbf2487 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 20 Sep 2021 12:57:57 +0200 Subject: [PATCH 079/314] Module docs transfer - point people to website! (#720) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Transfer module docs to nf-core website * Missed a hyperlink * Update README.md Co-authored-by: Harshil Patel * Update README.md Co-authored-by: Harshil Patel * Update README.md * Update README.md Co-authored-by: Harshil Patel * Update README.md Co-authored-by: Harshil Patel --- README.md | 411 +----------------------------------------------------- 1 file changed, 4 insertions(+), 407 deletions(-) diff --git a/README.md b/README.md index e75a039b..3ef44a45 100644 --- a/README.md +++ b/README.md @@ -12,21 +12,13 @@ [![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) > THIS REPOSITORY IS UNDER ACTIVE DEVELOPMENT. SYNTAX, ORGANISATION AND LAYOUT MAY CHANGE WITHOUT NOTICE! -> PLEASE BE KIND TO OUR CODE REVIEWERS AND SUBMIT ONE PULL REQUEST PER MODULE :) A repository for hosting [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) module files containing tool-specific process definitions and their associated documentation. 
## Table of contents - [Using existing modules](#using-existing-modules) -- [Adding a new module file](#adding-a-new-module-file) - - [Checklist](#checklist) - - [nf-core modules create](#nf-core-modules-create) - - [Test data](#test-data) - - [Running tests manually](#running-tests-manually) - - [Uploading to `nf-core/modules`](#uploading-to-nf-coremodules) - - [Guidelines](#guidelines) -- [Terminology](#terminology) +- [Adding new modules](#adding-new-modules) - [Help](#help) - [Citation](#citation) @@ -139,406 +131,11 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi ╰──────────────────────╯ ``` -We have plans to add other utility commands to help developers install and maintain modules downloaded from this repository so watch this space e.g. `nf-core modules update` command to automatically check and update modules installed within the pipeline. +## Adding new modules -## Adding a new module file +If you wish to contribute a new module, please see the documentation on the [nf-core website](https://nf-co.re/developers/adding_modules). -If you decide to upload a module to `nf-core/modules` then this will -ensure that it will become available to all nf-core pipelines, -and to everyone within the Nextflow community! See -[`modules/`](modules) -for examples. - -### Checklist - -Please check that the module you wish to add isn't already on [`nf-core/modules`](https://github.com/nf-core/modules/tree/master/modules): -- Use the [`nf-core modules list`](https://github.com/nf-core/tools#list-modules) command -- Check [open pull requests](https://github.com/nf-core/modules/pulls) -- Search [open issues](https://github.com/nf-core/modules/issues) - -If the module doesn't exist on `nf-core/modules`: -- Please create a [new issue](https://github.com/nf-core/modules/issues/new?assignees=&labels=new%20module&template=new_nodule.md&title=new%20module:) before adding it -- Set an appropriate subject for the issue e.g. `new module: fastqc` -- Add yourself to the `Assignees` so we can track who is working on the module - -### nf-core modules create - -We have implemented a number of commands in the `nf-core/tools` package to make it incredibly easy for you to create and contribute your own modules to nf-core/modules. - -1. Install the latest version of [`nf-core/tools`](https://github.com/nf-core/tools#installation) (`>=2.0`) -2. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.04.0`) -3. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`Conda`](https://conda.io/miniconda.html) -4. [Fork and clone this repo locally](#uploading-to-nf-coremodules) -5. Set up git by adding a new remote of the nf-core git repo called `upstream` - - ```bash - git remote add upstream https://github.com/nf-core/modules.git - ``` - - Make a new branch for your module and check it out - - ```bash - git checkout -b fastqc - ``` - -6. Create a module using the [nf-core DSL2 module template](https://github.com/nf-core/tools/blob/master/nf_core/module-template/modules/main.nf): - - ```console - $ nf-core modules create . --tool fastqc --author @joebloggs --label process_low --meta - - ,--./,-. 
- ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.0 - - INFO Using Bioconda package: 'bioconda::fastqc=0.11.9' create.py:130 - INFO Using Docker / Singularity container with tag: 'fastqc:0.11.9--0' create.py:140 - INFO Created / edited following files: create.py:218 - ./modules/fastqc/functions.nf - ./modules/fastqc/main.nf - ./modules/fastqc/meta.yml - ./tests/modules/fastqc/main.nf - ./tests/modules/fastqc/test.yml - ./tests/config/pytest_modules.yml - ``` - - All of the files required to add the module to `nf-core/modules` will be created/edited in the appropriate places. The 4 files you will need to change are: - - 1. [`./modules/fastqc/main.nf`](https://github.com/nf-core/modules/blob/master/modules/fastqc/main.nf) - - This is the main script containing the `process` definition for the module. You will see an extensive number of `TODO` statements to help guide you to fill in the appropriate sections and to ensure that you adhere to the guidelines we have set for module submissions. - - 2. [`./modules/fastqc/meta.yml`](https://github.com/nf-core/modules/blob/master/modules/fastqc/meta.yml) - - This file will be used to store general information about the module and author details - the majority of which will already be auto-filled. However, you will need to add a brief description of the files defined in the `input` and `output` section of the main script since these will be unique to each module. - - 3. [`./tests/modules/fastqc/main.nf`](https://github.com/nf-core/modules/blob/master/tests/modules/fastqc/main.nf) - - Every module MUST have a test workflow. This file will define one or more Nextflow `workflow` definitions that will be used to unit test the output files created by the module. By default, one `workflow` definition will be added but please feel free to add as many as possible so we can ensure that the module works on different data types / parameters e.g. separate `workflow` for single-end and paired-end data. - - Minimal test data required for your module may already exist within this repository, in which case you may just have to change a couple of paths in this file - see the [Test data](#test-data) section for more info and guidelines for adding new standardised data if required. - - 4. [`./tests/modules/fastqc/test.yml`](https://github.com/nf-core/modules/blob/master/tests/modules/fastqc/test.yml) - - This file will contain all of the details required to unit test the main script in the point above using [pytest-workflow](https://pytest-workflow.readthedocs.io/). If possible, any outputs produced by the test workflow(s) MUST be included and listed in this file along with an appropriate check e.g. md5sum. The different test options are listed in the [pytest-workflow docs](https://pytest-workflow.readthedocs.io/en/stable/#test-options). - - As highlighted in the next point, we have added a command to make it much easier to test the workflow(s) defined for the module and to automatically create the `test.yml` with the md5sum hashes for all of the outputs generated by the module. - - `md5sum` checks are the preferable choice of test to determine file changes, however, this may not be possible for all outputs generated by some tools e.g. if they include time stamps or command-related headers. Please do your best to avoid just checking for the file being present e.g. it may still be possible to check that the file contains the appropriate text snippets. - -7. 
Create a yaml file containing information required for module unit testing - - ```console - $ nf-core modules create-test-yml - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.0 - - - INFO Press enter to use default values (shown in brackets) or type your own responses test_yml_builder.py:51 - ? Tool name: fastqc - Test YAML output path (- for stdout) (tests/modules/fastqc/test.yml): - INFO Looking for test workflow entry points: 'tests/modules/fastqc/main.nf' test_yml_builder.py:116 - INFO Building test meta for entry point 'test_fastqc_single_end' test_yml_builder.py:150 - Test name (fastqc test_fastqc_single_end): - Test command (nextflow run tests/modules/fastqc -entry test_fastqc_single_end -c tests/config/nextflow.config): - Test tags (comma separated) (fastqc,fastqc_single_end): - Test output folder with results (leave blank to run test): - ? Choose software profile Singularity - INFO Setting env var '$PROFILE' to 'singularity' test_yml_builder.py:258 - INFO Running 'fastqc' test with command: test_yml_builder.py:263 - nextflow run tests/modules/fastqc -entry test_fastqc_single_end -c tests/config/nextflow.config --outdir /tmp/tmpgbneftf5 - INFO Test workflow finished! test_yml_builder.py:276 - INFO Writing to 'tests/modules/fastqc/test.yml' test_yml_builder.py:293 - ``` - - > NB: See docs for [running tests manually](#running-tests-manually) if you would like to run the tests manually. - -8. Lint the module locally to check that it adheres to nf-core guidelines before submission - - ```console - $ nf-core modules lint . --tool fastqc - - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' - - nf-core/tools version 2.0 - - INFO Linting modules repo: . lint.py:102 - INFO Linting module: fastqc lint.py:106 - - ╭────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ - │ [!] 3 Test Warnings │ - ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - ╭──────────────┬──────────────────────────────────────────────────────────────┬──────────────────────────────────╮ - │ Module name │ Test message │ File path │ - ├──────────────┼──────────────────────────────────────────────────────────────┼──────────────────────────────────┤ - │ fastqc │ TODO string in meta.yml: #Add a description of the module... │ modules/nf-core/modules/fastqc/ │ - │ fastqc │ TODO string in meta.yml: #Add a description and other det... │ modules/nf-core/modules/fastqc/ │ - │ fastqc │ TODO string in meta.yml: #Add a description of all of the... │ modules/nf-core/modules/fastqc/ │ - ╰──────────────┴──────────────────────────────────────────────────────────────┴──────────────────────────────────╯ - ╭────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ - │ [!] 
1 Test Failed │ - ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ - ╭──────────────┬──────────────────────────────────────────────────────────────┬──────────────────────────────────╮ - │ Module name │ Test message │ File path │ - ├──────────────┼──────────────────────────────────────────────────────────────┼──────────────────────────────────┤ - │ fastqc │ 'meta' map not emitted in output channel(s) │ modules/nf-core/modules/fastqc/ │ - ╰──────────────┴──────────────────────────────────────────────────────────────┴──────────────────────────────────╯ - ╭──────────────────────╮ - │ LINT RESULTS SUMMARY │ - ├──────────────────────┤ - │ [✔] 38 Tests Passed │ - │ [!] 3 Test Warning │ - │ [✗] 1 Test Failed │ - ╰──────────────────────╯ - ``` - -9. Once ready, the code can be pushed and a pull request (PR) created - - On a regular basis you can pull upstream changes into this branch and it is recommended to do so before pushing and creating a pull request - see below. Rather than merging changes directly from upstream the rebase strategy is recommended so that your changes are applied on top of the latest master branch from the nf-core repo. This can be performed as follows - - ```bash - git pull --rebase upstream master - ``` - - Once you are ready you can push the code and create a PR - - ```bash - git push -u origin fastqc - ``` - - Once the PR has been accepted you should delete the branch and checkout master again. - - ```bash - git checkout master - git branch -d fastqc - ``` - - In case there are commits on the local branch that didn't make it into the PR (usually commits made after the PR), git will warn about this and not delete the branch. If you are sure you want to delete, use the following command - - ```bash - git branch -D fastqc - ``` - -### Test data - -In order to test that each module added to `nf-core/modules` is actually working and to be able to track any changes to results files between module updates we have set-up a number of Github Actions CI tests to run each module on a minimal test dataset using Docker, Singularity and Conda. - -- All test data for `nf-core/modules` MUST be added to the `modules` branch of [`nf-core/test-datasets`](https://github.com/nf-core/test-datasets/tree/modules/data) and organised by filename extension. - -- In order to keep the size of this repository as minimal as possible, pre-existing files from [`nf-core/test-datasets`](https://github.com/nf-core/test-datasets/tree/modules/data) MUST be reused if at all possible. - -- Test files MUST be kept as tiny as possible. - -- If the appropriate test data doesn't exist in the `modules` branch of [`nf-core/test-datasets`](https://github.com/nf-core/test-datasets/tree/modules/data) please contact us on the [nf-core Slack `#modules` channel](https://nfcore.slack.com/channels/modules) (you can join with [this invite](https://nf-co.re/join/slack)) to discuss possible options. - -### Running tests manually - -As outlined in the [nf-core modules create](#nf-core-modules-create) section we have made it quite trivial to create an initial yaml file (via the `nf-core modules create-test-yml` command) containing a listing of all of the module output files and their associated md5sums. However, md5sum checks may not be appropriate for all output files if for example they contain timestamps. This is why it is a good idea to re-run the tests locally with `pytest-workflow` before you create your pull request adding the module. 
If your files do indeed have timestamps or other issues that prevent you from using the md5sum check, then you can edit the `test.yml` file to instead check that the file contains some specific content or as a last resort, if it exists. The different test options are listed in the [pytest-workflow docs](https://pytest-workflow.readthedocs.io/en/stable/#test-options). - -Please follow the steps below to run the tests locally: - -1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.04.0`) - -2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`Conda`](https://conda.io/miniconda.html) - -3. Install [`pytest-workflow`](https://pytest-workflow.readthedocs.io/en/stable/#installation) - -4. Start running your own tests using the appropriate [`tag`](https://github.com/nf-core/modules/blob/3d720a24fd3c766ba56edf3d4e108a1c45d353b2/tests/modules/fastqc/test.yml#L3-L5) defined in the `test.yml`: - - - Typical command with Docker: - - ```console - cd /path/to/git/clone/of/nf-core/modules/ - PROFILE=docker pytest --tag fastqc --symlink --keep-workflow-wd - ``` - - - Typical command with Singularity: - - ```console - cd /path/to/git/clone/of/nf-core/modules/ - TMPDIR=~ PROFILE=singularity pytest --tag fastqc --symlink --keep-workflow-wd - ``` - - - Typical command with Conda: - - ```console - cd /path/to/git/clone/of/nf-core/modules/ - PROFILE=conda pytest --tag fastqc --symlink --keep-workflow-wd - ``` - - - See [docs on running pytest-workflow](https://pytest-workflow.readthedocs.io/en/stable/#running-pytest-workflow) for more info. - -> :warning: if you have a module named `build` this can conflict with some pytest internal behaviour. This results in no tests being run (i.e. recieving a message of `collected 0 items`). In this case rename the `tests//build` directry to `tests//build_test`, and update the corresponding `test.yml` accordingly. An example can be seen with the [`bowtie2/build` module tests](https://github.com/nf-core/modules/tree/master/tests/modules/bowtie2/build_test). - -### Uploading to `nf-core/modules` - -[Fork](https://help.github.com/articles/fork-a-repo/) the `nf-core/modules` repository to your own GitHub account. Within the local clone of your fork add the module file to the [`modules/`](modules) directory. Please try and keep PRs as atomic as possible to aid the reviewing process - ideally, one module addition/update per PR. - -Commit and push these changes to your local clone on GitHub, and then [create a pull request](https://help.github.com/articles/creating-a-pull-request-from-a-fork/) on the `nf-core/modules` GitHub repo with the appropriate information. - -We will be notified automatically when you have created your pull request, and providing that everything adheres to nf-core guidelines we will endeavour to approve your pull request as soon as possible. - -### Guidelines - -The key words "MUST", "MUST NOT", "SHOULD", etc. are to be interpreted as described in [RFC 2119](https://tools.ietf.org/html/rfc2119). - -#### General - -- All non-mandatory command-line tool options MUST be provided as a string i.e. `options.args` where `options` is a Groovy Map that MUST be provided via the Nextflow `addParams` option when including the module via `include` in the parent workflow. - -- Software that can be piped together SHOULD be added to separate module files -unless there is a run-time, storage advantage in implementing in this way. 
For example, -using a combination of `bwa` and `samtools` to output a BAM file instead of a SAM file: - - ```bash - bwa mem | samtools view -B -T ref.fasta - ``` - -- Where applicable, the usage and generation of compressed files SHOULD be enforced as input and output, respectively: - - `*.fastq.gz` and NOT `*.fastq` - - `*.bam` and NOT `*.sam` - -- Where applicable, each module command MUST emit a file `.version.txt` containing a single line with the software's version in the format `` or `0.7.17` e.g. - - ```bash - echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt - ``` - - If the software is unable to output a version number on the command-line then a variable called `VERSION` can be manually specified to create this file e.g. [homer/annotatepeaks module](https://github.com/nf-core/modules/blob/master/modules/homer/annotatepeaks/main.nf). - -- The process definition MUST NOT contain a `when` statement. - -#### Naming conventions - -- The directory structure for the module name must be all lowercase e.g. [`modules/bwa/mem/`](modules/bwa/mem/). The name of the software (i.e. `bwa`) and tool (i.e. `mem`) MUST be all one word. - -- The process name in the module file MUST be all uppercase e.g. `process BWA_MEM {`. The name of the software (i.e. `BWA`) and tool (i.e. `MEM`) MUST be all one word separated by an underscore. - -- All parameter names MUST follow the `snake_case` convention. - -- All function names MUST follow the `camelCase` convention. - -#### Input/output options - -- Input channel declarations MUST be defined for all _possible_ input files (i.e. both required and optional files). - - Directly associated auxiliary files to an input file MAY be defined within the same input channel alongside the main input channel (e.g. [BAM and BAI](https://github.com/nf-core/modules/blob/e937c7950af70930d1f34bb961403d9d2aa81c7d/modules/samtools/flagstat/main.nf#L22)). - - Other generic auxiliary files used across different input files (e.g. common reference sequences) MAY be defined using a dedicated input channel (e.g. [reference files](https://github.com/nf-core/modules/blob/3cabc95d0ed8a5a4e07b8f9b1d1f7ff9a70f61e1/modules/bwa/mem/main.nf#L21-L23)). - -- Named file extensions MUST be emitted for ALL output channels e.g. `path "*.txt", emit: txt`. - -- Optional inputs are not currently supported by Nextflow. However, passing an empty list (`[]`) instead of a file as a module parameter can be used to work around this issue. - -#### Module parameters - -- A module file SHOULD only define input and output files as command-line parameters to be executed within the process. - -- All `params` within the module MUST be initialised and used in the local context of the module. In other words, named `params` defined in the parent workflow MUST NOT be assumed to be passed to the module to allow developers to call their parameters whatever they want. In general, it may be more suitable to use additional `input` value channels to cater for such scenarios. - -- If the tool supports multi-threading then you MUST provide the appropriate parameter using the Nextflow `task` variable e.g. `--threads $task.cpus`. - -- Any parameters that need to be evaluated in the context of a particular sample e.g. single-end/paired-end data MUST also be defined within the process. 
- -#### Resource requirements - -- An appropriate resource `label` MUST be provided for the module as listed in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29-L46) e.g. `process_low`, `process_medium` or `process_high`. - -- If the tool supports multi-threading then you MUST provide the appropriate parameter using the Nextflow `task` variable e.g. `--threads $task.cpus`. - -- If a module contains _multiple_ tools that supports multi-threading (e.g. [piping output into a samtools command](https://github.com/nf-core/modules/blob/28b023e6f4d0d2745406d9dc6e38006882804e67/modules/bowtie2/align/main.nf#L32-L46)), you MUST assign cpus per tool such that the total number of used CPUs does not exceed `task.cpus`. - - For example, combining two (or more) tools that both (all) have multi-threading, this can be assigned to the variable [`split_cpus`](https://github.com/nf-core/modules/blob/28b023e6f4d0d2745406d9dc6e38006882804e67/modules/bowtie2/align/main.nf#L32) - - If one tool is multi-threaded and another uses a single thread, you can specify directly in the command itself e.g. with [`${task.cpus - 1}`](https://github.com/nf-core/modules/blob/6e68c1af9a514bb056c0513ebba6764efd6750fc/modules/bwa/sampe/main.nf#L42-L43) - -#### Software requirements - -[BioContainers](https://biocontainers.pro/#/) is a registry of Docker and Singularity containers automatically created from all of the software packages on [Bioconda](https://bioconda.github.io/). Where possible we will use BioContainers to fetch pre-built software containers and Bioconda to install software using Conda. - -- Software requirements SHOULD be declared within the module file using the Nextflow `container` directive. For single-tool BioContainers, the `nf-core modules create` command will automatically fetch and fill-in the appropriate Conda / Docker / Singularity definitions by parsing the information provided in the first part of the module name: - - ```nextflow - conda (params.enable_conda ? "bioconda::bwa=0.7.17" : null) // Conda package - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa:0.7.17--hed695b0_7" // Singularity image - } else { - container "quay.io/biocontainers/bwa:0.7.17--hed695b0_7" // Docker image - } - ``` - -- If the software is available on Conda it MUST also be defined using the Nextflow `conda` directive. Using `bioconda::bwa=0.7.17` as an example, software MUST be pinned to the channel (i.e. `bioconda`) and version (i.e. `0.7.17`). Conda packages MUST not be pinned to a build because they can vary on different platforms. - -- If required, multi-tool containers may also be available on BioContainers e.g. [`bwa` and `samtools`](https://biocontainers.pro/#/tools/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40). You can install and use the [`galaxy-tool-util`](https://anaconda.org/bioconda/galaxy-tool-util) package to search for both single- and multi-tool containers available in Conda, Docker and Singularity format. e.g. 
to search for Docker (hosted on Quay.io) and Singularity multi-tool containers with both `bowtie` and `samtools` installed you can use the following command: - - ```console - mulled-search --destination quay singularity --channel bioconda --search bowtie samtools | grep "mulled" - ``` - - > NB: Build information for all tools within a multi-tool container can be obtained in the `/usr/local/conda-meta/history` file within the container. - -- It is also possible for a new multi-tool container to be built and added to BioContainers by submitting a pull request on their [`multi-package-containers`](https://github.com/BioContainers/multi-package-containers) repository. - - Fork the [multi-package-containers repository](https://github.com/BioContainers/multi-package-containers) - - Make a change to the `hash.tsv` file in the `combinations` directory see [here](https://github.com/aunderwo/multi-package-containers/blob/master/combinations/hash.tsv#L124) for an example where `pysam=0.16.0.1,biopython=1.78` was added. - - Commit the code and then make a pull request to the original repo, for [example](https://github.com/BioContainers/multi-package-containers/pull/1661) - - Once the PR has been accepted a container will get built and you can find it using a search tool in the `galaxy-tool-util conda` package - - ```console - mulled-search --destination quay singularity conda --search pysam biopython | grep "mulled" - quay mulled-v2-3a59640f3fe1ed11819984087d31d68600200c3f 185a25ca79923df85b58f42deb48f5ac4481e91f-0 docker pull quay.io/biocontainers/mulled-v2-3a59640f3fe1ed11819984087d31d68600200c3f:185a25ca79923df85b58f42deb48f5ac4481e91f-0 - singularity mulled-v2-3a59640f3fe1ed11819984087d31d68600200c3f 185a25ca79923df85b58f42deb48f5ac4481e91f-0 wget https://depot.galaxyproject.org/singularity/mulled-v2-3a59640f3fe1ed11819984087d31d68600200c3f:185a25ca79923df85b58f42deb48f5ac4481e91f-0 - ``` - - - You can copy and paste the `mulled-*` path into the relevant Docker and Singularity lines in the Nextflow `process` definition of your module - - To confirm that this is correct. Spin up a temporary Docker container - - ```console - docker run --rm -it quay.io/biocontainers/mulled-v2-3a59640f3fe1ed11819984087d31d68600200c3f:185a25ca79923df85b58f42deb48f5ac4481e91f-0 /bin/sh - ``` - - And in the command prompt type - - ```console - $ grep specs /usr/local/conda-meta/history - # update specs: ['biopython=1.78', 'pysam=0.16.0.1'] - ``` - - The packages should reflect those added to the multi-package-containers repo `hash.tsv` file - -- If the software is not available on Bioconda a `Dockerfile` MUST be provided within the module directory. We will use GitHub Actions to auto-build the containers on the [GitHub Packages registry](https://github.com/features/packages). - -#### Publishing results - -The [Nextflow `publishDir`](https://www.nextflow.io/docs/latest/process.html#publishdir) definition is currently quite limited in terms of parameter/option evaluation. To overcome this, the publishing logic we have implemented for use with DSL2 modules attempts to minimise changing the `publishDir` directive (default: `params.outdir`) in favour of constructing and appending the appropriate output directory paths via the `saveAs:` statement e.g. 
- -```nextflow -publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), publish_id:meta.id) } -``` - -The `saveFiles` function can be found in the [`functions.nf`](modules/fastqc/functions.nf) file of utility functions that will be copied into all module directories. It uses the various publishing `options` specified as input to the module to construct and append the relevant output path to `params.outdir`. - -We also use a standardised parameter called `params.publish_dir_mode` that can be used to alter the file publishing method (default: `copy`). - -## Terminology - -The features offered by Nextflow DSL2 can be used in various ways depending on the granularity with which you would like to write pipelines. Please see the listing below for the hierarchy and associated terminology we have decided to use when referring to DSL2 components: - -- *Module*: A `process` that can be used within different pipelines and is as atomic as possible i.e. cannot be split into another module. An example of this would be a module file containing the process definition for a single tool such as `FastQC`. At present, this repository has been created to only host atomic module files that should be added to the [`modules/`](modules/) directory along with the required documentation and tests. - -- *Sub-workflow*: A chain of multiple modules that offer a higher-level of functionality within the context of a pipeline. For example, a sub-workflow to run multiple QC tools with FastQ files as input. Sub-workflows should be shipped with the pipeline implementation and if required they should be shared amongst different pipelines directly from there. As it stands, this repository will not host sub-workflows although this may change in the future since well-written sub-workflows will be the most powerful aspect of DSL2. - -- *Workflow*: What DSL1 users would consider an end-to-end pipeline. For example, from one or more inputs to a series of outputs. This can either be implemented using a large monolithic script as with DSL1, or by using a combination of DSL2 individual modules and sub-workflows. +> Please be kind to our code reveiwers and submit one pull request per module :) ## Help From 4f0b02b7b54fab6164164b9067e2fc70460f0b8f Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 20 Sep 2021 13:00:11 +0200 Subject: [PATCH 080/314] Typo fix --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3ef44a45..f25b37d9 100644 --- a/README.md +++ b/README.md @@ -135,7 +135,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi If you wish to contribute a new module, please see the documentation on the [nf-core website](https://nf-co.re/developers/adding_modules). -> Please be kind to our code reveiwers and submit one pull request per module :) +> Please be kind to our code reviewers and submit one pull request per module :) ## Help From 60d8bd7c04c190d920535f13f1d1f5ae1708d203 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Mon, 20 Sep 2021 13:59:05 -0500 Subject: [PATCH 081/314] Update pangolin to 3.1.11 (#721) * Update pangolin to 3.1.11. * Update md5sum for test.pangolin.csv. 
--- modules/pangolin/main.nf | 6 +++--- tests/modules/pangolin/test.yml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index d1417990..5639dd00 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -11,11 +11,11 @@ process PANGOLIN { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::pangolin=3.1.7' : null) + conda (params.enable_conda ? 'bioconda::pangolin=3.1.11' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/pangolin:3.1.7--pyhdfd78af_0' + container 'https://depot.galaxyproject.org/singularity/pangolin:3.1.11--pyhdfd78af_1' } else { - container 'quay.io/biocontainers/pangolin:3.1.7--pyhdfd78af_0' + container 'quay.io/biocontainers/pangolin:3.1.11--pyhdfd78af_1' } input: diff --git a/tests/modules/pangolin/test.yml b/tests/modules/pangolin/test.yml index 2b6b7553..5fb5e79e 100644 --- a/tests/modules/pangolin/test.yml +++ b/tests/modules/pangolin/test.yml @@ -4,4 +4,4 @@ - pangolin files: - path: ./output/pangolin/test.pangolin.csv - md5sum: 8daea6ca9fee7b747080d4d2b28a83d7 + md5sum: 4eaff46b5b11cd59fb44d4e8e7c4945e From b32c46c6a26d9a8249c59a9c7440cb2d06bd2bdd Mon Sep 17 00:00:00 2001 From: Daniel Lundin Date: Mon, 20 Sep 2021 22:02:22 +0200 Subject: [PATCH 082/314] CAT CAT (#722) * bbmap/align done * Tests for single end and prebuilt index * Write bam file directly * Forgot to use all cpus for bbmap * Test md5sums * Added pigz support * Update modules/bbmap/align/meta.yml Co-authored-by: James A. Fellows Yates * process_medium and fastq * cat/cat module * Remove filter from CAT_CAT Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/cat/cat/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/cat/cat/main.nf | 39 +++++++++++++++++++ modules/cat/cat/meta.yml | 33 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/cat/cat/main.nf | 46 ++++++++++++++++++++++ tests/modules/cat/cat/test.yml | 33 ++++++++++++++++ 6 files changed, 223 insertions(+) create mode 100644 modules/cat/cat/functions.nf create mode 100644 modules/cat/cat/main.nf create mode 100644 modules/cat/cat/meta.yml create mode 100644 tests/modules/cat/cat/main.nf create mode 100644 tests/modules/cat/cat/test.yml diff --git a/modules/cat/cat/functions.nf b/modules/cat/cat/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/cat/cat/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf new file mode 100644 index 00000000..6a5ffe83 --- /dev/null +++ b/modules/cat/cat/main.nf @@ -0,0 +1,39 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CAT_CAT { + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "conda-forge::pigz=2.3.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/pigz:2.3.4" + } else { + container "quay.io/biocontainers/pigz:2.3.4" + } + + input: + path files + + output: + path "file*" , emit: file + path "*.version.txt", emit: version + + script: + def software = getSoftwareName(task.process) + cpus = Math.floor(task.cpus/2).toInteger() + + // Use options.suffix if specified, otherwise .out; add .gz if first input file has it + suffix = options.suffix ? "${options.suffix}" : ".out" + suffix += files[0].name =~ /\.gz/ ? '.gz' : '' + + """ + cat ${options.args} $files ${options.args2} > file${suffix} + cat --version | grep 'GNU coreutils' | sed 's/cat (GNU coreutils) //' > ${software}.version.txt + """ +} diff --git a/modules/cat/cat/meta.yml b/modules/cat/cat/meta.yml new file mode 100644 index 00000000..22dc29fc --- /dev/null +++ b/modules/cat/cat/meta.yml @@ -0,0 +1,33 @@ +name: cat_cat +description: A module for concatenation of gzipped or uncompressed files, optionally filtering the output +keywords: + - concatenate + - gzip + - filter +tools: + - cat: + description: Just concatenation + homepage: None + documentation: None + tool_dev_url: None + doi: "" + licence: "" + +input: + - files: + type: file + description: Gzipped or not files + pattern: "*" + +output: + - version: + type: file + description: File containing version of the pigz software + pattern: "*.{version.txt}" + - bam: + type: file + description: Concatenated, optionally filtered, file, gzipped if input was, otherwise not + pattern: "file*" + +authors: + - "@erikrikarddaniel" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index cf8f731c..c9178d85 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -230,6 +230,10 @@ bwameth/index: - modules/bwameth/index/** - tests/modules/bwameth/index/** +cat/cat: + - modules/cat/cat/** + - tests/modules/cat/cat/** + cat/fastq: - modules/cat/fastq/** - tests/modules/cat/fastq/** diff --git a/tests/modules/cat/cat/main.nf b/tests/modules/cat/cat/main.nf new file mode 100644 index 00000000..abf039b3 --- /dev/null +++ b/tests/modules/cat/cat/main.nf @@ -0,0 +1,46 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' addParams( options: [:] ) +include { CAT_CAT as CAT_CAT_SUFFIX } from 
'../../../../modules/cat/cat/main.nf' addParams( options: [suffix: ".fna"] ) + +workflow test_cat_ungzipped { + + input = [ + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) + ] + + CAT_CAT ( input ) +} + +workflow test_cat_gzipped { + + input = [ + file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true) + ] + + CAT_CAT ( input ) +} + +workflow test_cat_ungzipped_fna { + + input = [ + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) + ] + + CAT_CAT_SUFFIX ( input ) +} + +workflow test_cat_gzipped_fna { + + input = [ + file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true) + ] + + CAT_CAT_SUFFIX ( input ) +} diff --git a/tests/modules/cat/cat/test.yml b/tests/modules/cat/cat/test.yml new file mode 100644 index 00000000..084408b5 --- /dev/null +++ b/tests/modules/cat/cat/test.yml @@ -0,0 +1,33 @@ +- name: cat ungzipped + command: nextflow run ./tests/modules/cat/cat -entry test_cat_ungzipped -c tests/config/nextflow.config + tags: + - cat + - cat/cat + files: + - path: output/cat/file.out + md5sum: f44b33a0e441ad58b2d3700270e2dbe2 + +- name: cat gzipped + command: nextflow run ./tests/modules/cat/cat -entry test_cat_gzipped -c tests/config/nextflow.config + tags: + - cat + - cat/cat + files: + - path: output/cat/file.out.gz + +- name: cat ungzipped suffix + command: nextflow run ./tests/modules/cat/cat -entry test_cat_ungzipped_fna -c tests/config/nextflow.config + tags: + - cat + - cat/cat + files: + - path: output/cat/file.fna + md5sum: f44b33a0e441ad58b2d3700270e2dbe2 + +- name: cat gzipped suffix + command: nextflow run ./tests/modules/cat/cat -entry test_cat_gzipped_fna -c tests/config/nextflow.config + tags: + - cat + - cat/cat + files: + - path: output/cat/file.fna.gz From 1d6f47ce549f7a540f85c37c61029b25d6e3f149 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 21 Sep 2021 11:17:00 +0100 Subject: [PATCH 083/314] Refactor cat/cat module (#723) * Refactor cat/cat module * Change gzip terminology --- modules/cat/cat/main.nf | 36 +++++++++++++++++++++---------- modules/cat/cat/meta.yml | 21 +++++++++--------- tests/modules/cat/cat/main.nf | 39 +++++++++++++++++----------------- tests/modules/cat/cat/test.yml | 26 +++++++++++------------ 4 files changed, 68 insertions(+), 54 deletions(-) diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index 6a5ffe83..1c7dbd7c 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -18,22 +18,36 @@ process CAT_CAT { } input: - path files + path files_in + val file_out output: - path "file*" , emit: file + path "${file_out}*" , emit: file_out path "*.version.txt", emit: version script: - def software = getSoftwareName(task.process) - cpus = Math.floor(task.cpus/2).toInteger() + def file_list = files_in.collect { it.toString() } + if (file_list.size > 1) { - // Use options.suffix if specified, otherwise .out; add .gz if first input file has it - suffix = options.suffix ? "${options.suffix}" : ".out" - suffix += files[0].name =~ /\.gz/ ? 
'.gz' : '' + // | input | output | command1 | command2 | + // |-----------|------------|----------|----------| + // | gzipped | gzipped | cat | | + // | ungzipped | ungzipped | cat | | + // | gzipped | ungzipped | zcat | | + // | ungzipped | gzipped | cat | pigz | - """ - cat ${options.args} $files ${options.args2} > file${suffix} - cat --version | grep 'GNU coreutils' | sed 's/cat (GNU coreutils) //' > ${software}.version.txt - """ + def in_zip = file_list[0].endsWith('.gz') + def out_zip = file_out.endsWith('.gz') + def command1 = (in_zip && !out_zip) ? 'zcat' : 'cat' + def command2 = (!in_zip && out_zip) ? "| pigz -c -p $task.cpus $options.args2" : '' + """ + $command1 \\ + $options.args \\ + ${file_list.join(' ')} \\ + $command2 \\ + > $file_out + + echo \$(pigz --version 2>&1) | sed 's/pigz //g' > pigz.version.txt + """ + } } diff --git a/modules/cat/cat/meta.yml b/modules/cat/cat/meta.yml index 22dc29fc..a1318b19 100644 --- a/modules/cat/cat/meta.yml +++ b/modules/cat/cat/meta.yml @@ -1,33 +1,34 @@ name: cat_cat -description: A module for concatenation of gzipped or uncompressed files, optionally filtering the output +description: A module for concatenation of gzipped or uncompressed files keywords: - concatenate - gzip - - filter + - cat tools: - cat: description: Just concatenation homepage: None - documentation: None + documentation: https://man7.org/linux/man-pages/man1/cat.1.html tool_dev_url: None - doi: "" - licence: "" input: - - files: + - files_in: type: file - description: Gzipped or not files + description: List of compressed / uncompressed files pattern: "*" + - file_out: + type: value + description: Full name of output file with or without .gz extension output: - version: type: file description: File containing version of the pigz software pattern: "*.{version.txt}" - - bam: + - file_out: type: file - description: Concatenated, optionally filtered, file, gzipped if input was, otherwise not - pattern: "file*" + description: Concatenated file. 
Will be gzipped if file_out ends with ".gz" + pattern: "${file_out}" authors: - "@erikrikarddaniel" diff --git a/tests/modules/cat/cat/main.nf b/tests/modules/cat/cat/main.nf index abf039b3..a110a8ab 100644 --- a/tests/modules/cat/cat/main.nf +++ b/tests/modules/cat/cat/main.nf @@ -3,44 +3,43 @@ nextflow.enable.dsl = 2 include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' addParams( options: [:] ) -include { CAT_CAT as CAT_CAT_SUFFIX } from '../../../../modules/cat/cat/main.nf' addParams( options: [suffix: ".fna"] ) -workflow test_cat_ungzipped { - +workflow test_cat_unzipped_unzipped { + input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) ] - CAT_CAT ( input ) + CAT_CAT ( input, 'cat.txt' ) } -workflow test_cat_gzipped { - +workflow test_cat_zipped_zipped { + input = [ file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true) ] - CAT_CAT ( input ) + CAT_CAT ( input, 'cat.txt.gz' ) } -workflow test_cat_ungzipped_fna { - +workflow test_cat_zipped_unzipped { + + input = [ + file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true) + ] + + CAT_CAT ( input, 'cat.txt' ) +} + +workflow test_cat_unzipped_zipped { + input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) ] - CAT_CAT_SUFFIX ( input ) -} - -workflow test_cat_gzipped_fna { - - input = [ - file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true) - ] - - CAT_CAT_SUFFIX ( input ) + CAT_CAT ( input, 'cat.txt.gz' ) } diff --git a/tests/modules/cat/cat/test.yml b/tests/modules/cat/cat/test.yml index 084408b5..2f234a01 100644 --- a/tests/modules/cat/cat/test.yml +++ b/tests/modules/cat/cat/test.yml @@ -1,33 +1,33 @@ -- name: cat ungzipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_ungzipped -c tests/config/nextflow.config +- name: cat unzipped unzipped + command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_unzipped -c tests/config/nextflow.config tags: - cat - cat/cat files: - - path: output/cat/file.out + - path: output/cat/cat.txt md5sum: f44b33a0e441ad58b2d3700270e2dbe2 -- name: cat gzipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_gzipped -c tests/config/nextflow.config +- name: cat zipped zipped + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c tests/config/nextflow.config tags: - cat - cat/cat files: - - path: output/cat/file.out.gz + - path: output/cat/cat.txt.gz -- name: cat ungzipped suffix - command: nextflow run ./tests/modules/cat/cat -entry test_cat_ungzipped_fna -c tests/config/nextflow.config +- name: cat zipped unzipped + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c tests/config/nextflow.config tags: - cat - cat/cat files: - - path: output/cat/file.fna - md5sum: f44b33a0e441ad58b2d3700270e2dbe2 + - path: output/cat/cat.txt + md5sum: c439d3b60e7bc03e8802a451a0d9a5d9 -- name: cat gzipped suffix - command: nextflow run ./tests/modules/cat/cat -entry test_cat_gzipped_fna -c tests/config/nextflow.config +- 
name: cat unzipped zipped + command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c tests/config/nextflow.config tags: - cat - cat/cat files: - - path: output/cat/file.fna.gz + - path: output/cat/cat.txt.gz From 7830a4a80c80da176607def52a8555ed90e4ed9b Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 21 Sep 2021 20:57:27 +0200 Subject: [PATCH 084/314] add: MALTEXTRACT (#725) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Start maltextract module * start tests * Get tests working now we have test data * Apply suggestions from code review Co-authored-by: Harshil Patel * Changes after review * Update tests/modules/maltextract/main.nf Co-authored-by: Jose Espinosa-Carrasco * Update tests/modules/maltextract/main.nf Co-authored-by: Jose Espinosa-Carrasco * Update tests/modules/maltextract/main.nf Co-authored-by: Harshil Patel Co-authored-by: Jose Espinosa-Carrasco --- modules/maltextract/functions.nf | 68 ++++++++++++++++++++++++++++++ modules/maltextract/main.nf | 44 +++++++++++++++++++ modules/maltextract/meta.yml | 51 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 4 +- tests/modules/maltextract/main.nf | 27 ++++++++++++ tests/modules/maltextract/test.yml | 11 +++++ 7 files changed, 208 insertions(+), 1 deletion(-) create mode 100644 modules/maltextract/functions.nf create mode 100644 modules/maltextract/main.nf create mode 100644 modules/maltextract/meta.yml create mode 100644 tests/modules/maltextract/main.nf create mode 100644 tests/modules/maltextract/test.yml diff --git a/modules/maltextract/functions.nf b/modules/maltextract/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/maltextract/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/maltextract/main.nf b/modules/maltextract/main.nf new file mode 100644 index 00000000..d7402cb8 --- /dev/null +++ b/modules/maltextract/main.nf @@ -0,0 +1,44 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MALTEXTRACT { + + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "bioconda::hops=0.35" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1" + } else { + container "quay.io/biocontainers/hops:0.35--hdfd78af_1" + } + + input: + path rma6 + path taxon_list + path ncbi_dir + + output: + path "results" , emit: results + path "*.version.txt", emit: version + + script: + def software = getSoftwareName(task.process) + """ + MaltExtract \\ + -Xmx${task.memory.toGiga()}g \\ + -p $task.cpus \\ + -i ${rma6.join(' ')} \\ + -t $taxon_list \\ + -r $ncbi_dir \\ + -o results/ \\ + $options.args + + echo \$(MaltExtract --help | head -n 2 | tail -n 1) | sed 's/MaltExtract version//' > ${software}.version.txt + """ +} diff --git a/modules/maltextract/meta.yml b/modules/maltextract/meta.yml new file mode 100644 index 00000000..3cb20fa2 --- /dev/null +++ b/modules/maltextract/meta.yml @@ -0,0 +1,51 @@ +name: maltextract +description: Tool for evaluation of MALT results for true positives of ancient metagenomic taxonomic screening +keywords: + - malt + - MaltExtract + - HOPS + - alignment + - metagenomics + - ancient DNA + - aDNA + - palaeogenomics + - archaeogenomics + - microbiome + - authentication + - damage + - edit distance +tools: + - maltextract: + description: Java tool to work with ancient metagenomics + homepage: https://github.com/rhuebler/hops + documentation: https://github.com/rhuebler/hops + tool_dev_url: https://github.com/rhuebler/hops + doi: "https://doi.org/10.1186/s13059-019-1903-0" + licence: ['GPL 3'] + +input: + - rma6: + type: file + description: RMA6 files from MALT + pattern: "*.rma6" + - taxon_list: + type: file + description: List of target taxa to evaluate + pattern: "*.txt" + - ncbi_dir: + type: directory + description: Directory containing NCBI taxonomy map and tre files + pattern: "${ncbi_dir}/" + +output: + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - results: + type: directory + description: Directory containing MaltExtract text results files + pattern: "*.rma6" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index c9178d85..ce1c219a 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -562,6 +562,10 @@ malt/run: - 
modules/malt/run/** - tests/modules/malt/run/** +maltextract: + - modules/maltextract/** + - tests/modules/maltextract/** + mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 9854999d..eda747e0 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -7,7 +7,6 @@ params { 'genome' { genome_fasta = "${test_data_dir}/genomics/sarscov2/genome/genome.fasta" genome_fasta_fai = "${test_data_dir}/genomics/sarscov2/genome/genome.fasta.fai" - genome_fasta_zip = "${test_data_dir}/genomics/sarscov2/genome/genome.fasta.zip" genome_dict = "${test_data_dir}/genomics/sarscov2/genome/genome.dict" genome_gff3 = "${test_data_dir}/genomics/sarscov2/genome/genome.gff3" genome_gff3_gz = "${test_data_dir}/genomics/sarscov2/genome/genome.gff3.gz" @@ -25,6 +24,9 @@ params { kraken2 = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2" kraken2_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2.tar.gz" + ncbi_taxmap_zip = "${test_data_dir}/genomics/sarscov2/genome/db/maltextract/ncbi_taxmap.zip" + taxon_list_txt = "${test_data_dir}/genomics/sarscov2/genome/db/maltextract/taxon_list.txt" + all_sites_fas = "${test_data_dir}/genomics/sarscov2/genome/alignment/all_sites.fas" informative_sites_fas = "${test_data_dir}/genomics/sarscov2/genome/alignment/informative_sites.fas" diff --git a/tests/modules/maltextract/main.nf b/tests/modules/maltextract/main.nf new file mode 100644 index 00000000..d18923ca --- /dev/null +++ b/tests/modules/maltextract/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) +include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' addParams( options: [:] ) +include { MALT_RUN } from '../../../modules/malt/run/main.nf' addParams( options: [:] ) +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' addParams( options: [:] ) + +workflow test_maltextract { + + fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + gff = [] + seq_type = "DNA" + map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) + input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + mode = "BlastN" + taxon_list = file(params.test_data['sarscov2']['genome']['taxon_list_txt'], checkIfExists: true) + ncbi_dir = file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) + + UNZIP_MALT ( map_db ) + UNZIP_MALTEXTRACT ( ncbi_dir ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP_MALT.out.unzipped_archive ) + MALT_RUN ( input, mode, MALT_BUILD.out.index ) + MALTEXTRACT ( MALT_RUN.out.rma6, taxon_list, UNZIP_MALTEXTRACT.out.unzipped_archive) +} diff --git a/tests/modules/maltextract/test.yml b/tests/modules/maltextract/test.yml new file mode 100644 index 00000000..87bf0182 --- /dev/null +++ b/tests/modules/maltextract/test.yml @@ -0,0 +1,11 @@ +- name: maltextract + command: nextflow run ./tests/modules/maltextract -entry test_maltextract -c tests/config/nextflow.config + tags: + - maltextract + files: + - path: output/maltextract/results/error.txt + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/maltextract/results/error.txt + - path: output/maltextract/results/log.txt + 
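// The MALTEXTRACT process added above sizes its Java heap from task.memory
// (-Xmx${task.memory.toGiga()}g) and its thread count from task.cpus, so both are
// tuned from configuration rather than inside the module. A sketch of a process
// selector one might add to a pipeline's nextflow.config; the resource values are
// illustrative assumptions, not part of the patch.
process {
    withName: 'MALTEXTRACT' {
        cpus   = 4      // passed through as -p $task.cpus
        memory = 8.GB   // passed through as -Xmx8g
    }
}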
contains: + - "INFO: Peak memory" From 0d53a34eed67b8e014f676c1923d47715f421ce7 Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Tue, 21 Sep 2021 21:20:26 +0200 Subject: [PATCH 085/314] module fastani (#695) * initiate agrvate module * remove todos [ci skip] * initiate fastani draft [ci skip] * clean stubs [ci skip] * interim commit [ci skip] * accomodate the batch/per-sample processing [ci skip] * use the meta map [ci skip] * run first test [ci skip] * remove extra spaces [ci skip] * change output file name [ci skip] * update the expected output [ci skip] * update the files used for test [ci skip] * fix typo [ci skip] * test passing [ci skip] * update the description * remove extra files * accomodate CR suggestions [ci skip] Co-authored-by: Harshil Patel * accomodate CR suggestions [ci skip] Co-authored-by: Harshil Patel * accomodate CR suggestions [ci skip] Co-authored-by: Harshil Patel * use meta map for batch analysis * fix the tests * rely upon tuples * Apply suggestions from code review * Update main.nf Co-authored-by: Harshil Patel --- modules/fastani/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/fastani/main.nf | 52 +++++++++++++++++++++++++ modules/fastani/meta.yml | 43 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/fastani/main.nf | 13 +++++++ tests/modules/fastani/test.yml | 7 ++++ 6 files changed, 187 insertions(+) create mode 100644 modules/fastani/functions.nf create mode 100644 modules/fastani/main.nf create mode 100644 modules/fastani/meta.yml create mode 100644 tests/modules/fastani/main.nf create mode 100644 tests/modules/fastani/test.yml diff --git a/modules/fastani/functions.nf b/modules/fastani/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/fastani/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/fastani/main.nf b/modules/fastani/main.nf new file mode 100644 index 00000000..11916a65 --- /dev/null +++ b/modules/fastani/main.nf @@ -0,0 +1,52 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FASTANI { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::fastani=1.32" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/fastani:1.32--he1c1bb9_0" + } else { + container "quay.io/biocontainers/fastani:1.32--he1c1bb9_0" + } + + input: + tuple val(meta), path(query) + path reference + + output: + tuple val(meta), path("*.ani.txt"), emit: ani + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + if (meta.batch_input) { + """ + fastANI \\ + -ql $query \\ + -rl $reference \\ + -o ${prefix}.ani.txt + + echo \$(fastANI --version 2>&1) | sed 's/version//;' > ${software}.version.txt + """ + } else { + """ + fastANI \\ + -q $query \\ + -r $reference \\ + -o ${prefix}.ani.txt + + echo \$(fastANI --version 2>&1) | sed 's/version//;' > ${software}.version.txt + """ + } +} diff --git a/modules/fastani/meta.yml b/modules/fastani/meta.yml new file mode 100644 index 00000000..ed6be165 --- /dev/null +++ b/modules/fastani/meta.yml @@ -0,0 +1,43 @@ +name: fastani +description: write your description here +keywords: + - fastani +tools: + - fastani: + description: FastANI is developed for fast alignment-free computation of whole-genome Average Nucleotide Identity (ANI). + homepage: https://github.com/ParBLiSS/FastANI + documentation: https://github.com/ParBLiSS/FastANI + tool_dev_url: https://github.com/ParBLiSS/FastANI + doi: 10.1038/s41467-018-07641-9 + licence: ['Apache-2.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - query: + type: file + description: Fasta file(s) to be queried + pattern: "*.fasta" + - reference: + type: file + description: Fasta file(s) to be used as reference for the query + pattern: "*.fasta" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
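// The FASTANI module above selects fastANI's list mode (-ql/-rl) when the meta map
// carries batch_input: true, and single-genome mode (-q/-r) otherwise. A sketch of
// the batch form; the list files (one assembly path per line) and the workflow name
// are illustrative assumptions, and the paths listed inside them must be reachable
// from the task work directory.
nextflow.enable.dsl = 2

include { FASTANI } from './modules/fastani/main.nf' addParams( options: [:] )

workflow fastani_batch {
    query_list     = file('queries.txt', checkIfExists: true)      // assumed list of query assemblies
    reference_list = file('references.txt', checkIfExists: true)   // assumed list of reference assemblies

    FASTANI ( [ [ id: 'batch', batch_input: true ], query_list ], reference_list )
}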
[ id:'test', single_end:false ] + - ani: + type: file + description: Results of the query + pattern: "*.ani.txt" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index ce1c219a..90218461 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -318,6 +318,10 @@ expansionhunter: - modules/expansionhunter/** - tests/modules/expansionhunter/** +fastani: + - modules/fastani/** + - tests/modules/fastani/** + fastp: - modules/fastp/** - tests/modules/fastp/** diff --git a/tests/modules/fastani/main.nf b/tests/modules/fastani/main.nf new file mode 100644 index 00000000..a5548e20 --- /dev/null +++ b/tests/modules/fastani/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FASTANI } from '../../../modules/fastani/main.nf' addParams( options: [:] ) + +workflow test_fastani { + + query = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + reference = file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) + + FASTANI ( [[ id:'test' ], query], reference ) +} diff --git a/tests/modules/fastani/test.yml b/tests/modules/fastani/test.yml new file mode 100644 index 00000000..cd411d06 --- /dev/null +++ b/tests/modules/fastani/test.yml @@ -0,0 +1,7 @@ +- name: fastani + command: nextflow run ./tests/modules/fastani -entry test_fastani -c tests/config/nextflow.config + tags: + - fastani + files: + - path: output/fastani/test.ani.txt + md5sum: 31d4f04e8cffe13080c86db3f9f3a589 From 8e9d6cd5e43ff1f4bd8314f52e989d7b2c92625f Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Tue, 21 Sep 2021 13:28:53 -0600 Subject: [PATCH 086/314] Update agrvate to latest version (#728) The new version of agrvate fixes an issue where sample names with a `.` (dot) in there names caused the name to be truncated https://github.com/VishnuRaghuram94/AgrVATE/releases/tag/v1.0.1 Co-authored-by: Harshil Patel --- modules/agrvate/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index bbbd9fa0..8f504927 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -11,11 +11,11 @@ process AGRVATE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::agrvate=1.0" : null) + conda (params.enable_conda ? 
"bioconda::agrvate=1.0.1" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/agrvate:1.0--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/agrvate:1.0.1--hdfd78af_0" } else { - container "quay.io/biocontainers/agrvate:1.0--hdfd78af_0" + container "quay.io/biocontainers/agrvate:1.0.1--hdfd78af_0" } input: From 77a2895785587219ee2b9d0ea16fa0d71ad14f3a Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Tue, 21 Sep 2021 21:45:42 +0200 Subject: [PATCH 087/314] Add module `kleborate` (#711) * initial commit [ci skip] * remove todo from the module files [ci skip] * add a sample test case [ci skip] * push the latest work [ci skip] * bump kleborate build * test passing with the new build for kleborate [ci skip] * ready for review * Apply suggestions from code review Co-authored-by: Robert A. Petit III Co-authored-by: Harshil Patel --- modules/kleborate/functions.nf | 68 ++++++++++++++++++++++++++++++++ modules/kleborate/main.nf | 39 ++++++++++++++++++ modules/kleborate/meta.yml | 43 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/kleborate/main.nf | 18 +++++++++ tests/modules/kleborate/test.yml | 7 ++++ 6 files changed, 179 insertions(+) create mode 100644 modules/kleborate/functions.nf create mode 100644 modules/kleborate/main.nf create mode 100644 modules/kleborate/meta.yml create mode 100644 tests/modules/kleborate/main.nf create mode 100644 tests/modules/kleborate/test.yml diff --git a/modules/kleborate/functions.nf b/modules/kleborate/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/kleborate/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf new file mode 100644 index 00000000..ef7eab23 --- /dev/null +++ b/modules/kleborate/main.nf @@ -0,0 +1,39 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process KLEBORATE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::kleborate=2.1.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/kleborate:2.1.0--pyhdfd78af_1" + } else { + container "quay.io/biocontainers/kleborate:2.1.0--pyhdfd78af_1" + } + + input: + tuple val(meta), path(fastas) + + output: + tuple val(meta), path("*.txt"), emit: txt + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + kleborate \\ + $options.args \\ + --outfile ${prefix}.results.txt \\ + --assemblies *.fasta + + echo \$(kleborate -v 2>&1) | sed 's/kleborate //;' > ${software}.version.txt + """ +} diff --git a/modules/kleborate/meta.yml b/modules/kleborate/meta.yml new file mode 100644 index 00000000..19643033 --- /dev/null +++ b/modules/kleborate/meta.yml @@ -0,0 +1,43 @@ +name: kleborate +description: Kleborate is a tool to screen genome assemblies of Klebsiella pneumoniae and the Klebsiella pneumoniae species complex (KpSC). +keywords: + - screening assemblies + - Klebsiella pneumoniae +tools: + - kleborate: + description: Screening Klebsiella genome assemblies for MLST, sub-species, and other Klebsiella related genes of interest + homepage: https://github.com/katholt/Kleborate + documentation: https://github.com/katholt/Kleborate/wiki + tool_dev_url: https://github.com/katholt/Kleborate + doi: 10.1038/s41467-021-24448-3 + licence: ['GPL v3 or later (GPL v3+)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fastas: + type: files + description: Klebsiella genome assemblies to be screened + pattern: "*.fasta" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
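// Kleborate's optional screens are passed through options.args rather than being
// hard-coded in the module above; the process then globs every staged *.fasta via
// --assemblies. A sketch reusing the sarscov2 test assemblies from
// tests/modules/kleborate/main.nf; the '--all' argument (enable all screens) and
// the workflow name are illustrative choices, not part of the patch.
nextflow.enable.dsl = 2

include { KLEBORATE } from './modules/kleborate/main.nf' addParams( options: [ args: '--all' ] )

workflow screen_assemblies {
    assemblies = [
        file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true),
        file(params.test_data['sarscov2']['illumina']['scaffolds_fasta'], checkIfExists: true)
    ]
    KLEBORATE ( [ [ id: 'test' ], assemblies ] )   // emits test.results.txt on the txt channel
}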
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - txt: + type: file + description: Result file generated after screening + pattern: "*.txt" + +authors: + - "@abhi18av" + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 90218461..43974ff7 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -505,6 +505,10 @@ kallistobustools/ref: - modules/kallistobustools/ref/** - tests/modules/kallistobustools/ref/** +kleborate: + - modules/kleborate/** + - tests/modules/kleborate/** + kraken2/kraken2: - modules/kraken2/kraken2/** - modules/untar/** diff --git a/tests/modules/kleborate/main.nf b/tests/modules/kleborate/main.nf new file mode 100644 index 00000000..f846e642 --- /dev/null +++ b/tests/modules/kleborate/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KLEBORATE } from '../../../modules/kleborate/main.nf' addParams( options: [:] ) + +workflow test_kleborate { + + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['scaffolds_fasta'], checkIfExists: true) + ] + ] + + KLEBORATE ( input ) +} diff --git a/tests/modules/kleborate/test.yml b/tests/modules/kleborate/test.yml new file mode 100644 index 00000000..1bee4708 --- /dev/null +++ b/tests/modules/kleborate/test.yml @@ -0,0 +1,7 @@ +- name: kleborate + command: nextflow run ./tests/modules/kleborate -entry test_kleborate -c tests/config/nextflow.config + tags: + - kleborate + files: + - path: output/kleborate/test.results.txt + md5sum: b7979a71170736098fb8403cd92748f5 From 5758e9f451e2eb089e777f398c2ec32cff28b3d2 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Wed, 22 Sep 2021 12:02:32 +0200 Subject: [PATCH 088/314] Unzip update (#730) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Change test dataset --- tests/modules/unzip/main.nf | 2 +- tests/modules/unzip/test.yml | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/modules/unzip/main.nf b/tests/modules/unzip/main.nf index b7f668b1..b5b208be 100644 --- a/tests/modules/unzip/main.nf +++ b/tests/modules/unzip/main.nf @@ -6,7 +6,7 @@ include { UNZIP } from '../../../modules/unzip/main.nf' addParams( options: [:] workflow test_unzip { - archive = file(params.test_data['sarscov2']['genome']['genome_fasta_zip'], checkIfExists: true) + archive = file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) UNZIP ( archive ) } diff --git a/tests/modules/unzip/test.yml b/tests/modules/unzip/test.yml index 93066eb0..1b0b1a97 100644 --- a/tests/modules/unzip/test.yml +++ b/tests/modules/unzip/test.yml @@ -3,5 +3,8 @@ tags: - unzip files: - - path: output/unzip/genome.fasta/genome.fasta - md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/unzip/ncbi_taxmap/ + - path: output/unzip/ncbi_taxmap/ncbi.map + md5sum: de30dbba85f9070612b632e2a5a95952 + - path: output/unzip/ncbi_taxmap/ncbi.tre + md5sum: 4029dd2091c685b9a86ddd9d0d870db0 From 25943a4c23c6ab585e740599df246b66078e966a Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Wed, 22 Sep 2021 14:31:01 +0200 Subject: [PATCH 089/314] Add glnexus (#729) * Add glnexus * Fix lint error * Refactor * Suggested changes Co-authored-by: Harshil Patel --- modules/glnexus/functions.nf | 68 +++++++++++++++++++++++++++++++++ modules/glnexus/main.nf | 49 ++++++++++++++++++++++++ modules/glnexus/meta.yml | 36 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/glnexus/main.nf | 13 +++++++ tests/modules/glnexus/test.yml | 7 ++++ 6 files changed, 177 insertions(+) create mode 100644 modules/glnexus/functions.nf create mode 100644 modules/glnexus/main.nf create mode 100644 modules/glnexus/meta.yml create mode 100644 tests/modules/glnexus/main.nf create mode 100644 tests/modules/glnexus/test.yml diff --git a/modules/glnexus/functions.nf b/modules/glnexus/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/glnexus/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// 
Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf new file mode 100644 index 00000000..dadb9d60 --- /dev/null +++ b/modules/glnexus/main.nf @@ -0,0 +1,49 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GLNEXUS { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::glnexus=1.4.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/glnexus:1.4.1--h40d77a6_0" + } else { + container "quay.io/biocontainers/glnexus:1.4.1--h40d77a6_0" + } + + input: + tuple val(meta), path(gvcfs) + + output: + tuple val(meta), path("*.bcf"), emit: bcf + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + // Make list of GVCFs to merge + def input = gvcfs.collect { it.toString() } + def avail_mem = 3 + if (!task.memory) { + log.info '[Glnexus] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + glnexus_cli \\ + --threads $task.cpus \\ + --mem-gbytes $avail_mem \\ + $options.args \\ + ${input.join(' ')} \\ + > ${prefix}.bcf + echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release //; s/ .*\$//' > ${software}.version.txt + """ +} diff --git a/modules/glnexus/meta.yml b/modules/glnexus/meta.yml new file mode 100644 index 00000000..f64a812e --- /dev/null +++ b/modules/glnexus/meta.yml @@ -0,0 +1,36 @@ +name: glnexus +description: merge gVCF files and perform joint variant calling +keywords: + - merge + - gvcf +tools: + - glnexus: + description: scalable gVCF merging and joint variant calling for population sequencing projects. + homepage: https://github.com/dnanexus-rnd/GLnexus + documentation: https://github.com/dnanexus-rnd/GLnexus/wiki/Getting-Started + tool_dev_url: None + doi: https://doi.org/10.1101/343970 + licence: ['Apache License 2.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
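// The GLNEXUS process above falls back to --mem-gbytes 3 when task.memory is unset,
// so giving the process an explicit memory directive is advisable in practice. A
// usage sketch; the per-sample gVCF paths and the '--config DeepVariant' preset are
// illustrative assumptions about the upstream caller, not part of the patch.
nextflow.enable.dsl = 2

include { GLNEXUS } from './modules/glnexus/main.nf' addParams( options: [ args: '--config DeepVariant' ] )

workflow joint_call {
    gvcfs = [
        file('sample1.g.vcf.gz', checkIfExists: true),   // assumed per-sample gVCFs
        file('sample2.g.vcf.gz', checkIfExists: true)
    ]
    GLNEXUS ( [ [ id: 'cohort' ], gvcfs ] )   // emits cohort.bcf on the bcf channel
}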
[ id:'test' ] + - gvcfs: + type: list + description: Input genomic vcf files + pattern: "*.{gvcf,gvcf.gz,g.vcf,g.vcf.gz}" + +output: + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - bcf: + type: file + description: merged BCF file + pattern: "*.bcf" +authors: + - "@ramprasadn" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 43974ff7..5307d684 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -422,6 +422,10 @@ gffread: - modules/gffread/** - tests/modules/gffread/** +glnexus: + - modules/glnexus/** + - tests/modules/glnexus/** + graphmap2/align: - modules/graphmap2/align/** - tests/modules/graphmap2/align/** diff --git a/tests/modules/glnexus/main.nf b/tests/modules/glnexus/main.nf new file mode 100644 index 00000000..2a79b2fa --- /dev/null +++ b/tests/modules/glnexus/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GLNEXUS } from '../../../modules/glnexus/main.nf' addParams( options: [:] ) + +workflow test_glnexus { + input = [ [ id:'test' ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'], checkIfExists: true) ] + ] + GLNEXUS ( input ) +} diff --git a/tests/modules/glnexus/test.yml b/tests/modules/glnexus/test.yml new file mode 100644 index 00000000..c7b255ee --- /dev/null +++ b/tests/modules/glnexus/test.yml @@ -0,0 +1,7 @@ +- name: glnexus test_glnexus + command: nextflow run tests/modules/glnexus -entry test_glnexus -c tests/config/nextflow.config + tags: + - glnexus + files: + - path: output/glnexus/test.bcf + md5sum: 33ac8c9f3ff54e6a23177ba94a449173 From ca53f7525b20bc021f37a8f841ba979fdd6aef49 Mon Sep 17 00:00:00 2001 From: Stephen Ficklin Date: Thu, 23 Sep 2021 01:13:04 -0700 Subject: [PATCH 090/314] Diamond (#710) * Added diamond * minor tweaks & yml fix * Fixed spacing issues due to failing lint * Update modules/diamond/blastp/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastp/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastp/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastp/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastp/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastx/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastx/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastx/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastx/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/makedb/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/makedb/main.nf Co-authored-by: Harshil Patel * Update tests/modules/diamond/blastp/main.nf Co-authored-by: Harshil Patel * Update tests/modules/diamond/blastx/main.nf Co-authored-by: Harshil Patel * Update tests/modules/diamond/blastx/main.nf Co-authored-by: Harshil Patel * Update tests/modules/diamond/blastp/main.nf Co-authored-by: Harshil Patel * Update modules/diamond/blastp/meta.yml Co-authored-by: Harshil Patel Co-authored-by: Harshil Patel --- modules/diamond/blastp/functions.nf | 68 +++++++++++++++++++++++++++ modules/diamond/blastp/main.nf | 47 ++++++++++++++++++ modules/diamond/blastp/meta.yml | 43 +++++++++++++++++ modules/diamond/blastx/functions.nf | 68 +++++++++++++++++++++++++++ modules/diamond/blastx/main.nf | 47 ++++++++++++++++++ 
modules/diamond/blastx/meta.yml | 43 +++++++++++++++++ modules/diamond/makedb/functions.nf | 68 +++++++++++++++++++++++++++ modules/diamond/makedb/main.nf | 42 +++++++++++++++++ modules/diamond/makedb/meta.yml | 34 ++++++++++++++ tests/config/pytest_modules.yml | 12 +++++ tests/modules/diamond/blastp/main.nf | 15 ++++++ tests/modules/diamond/blastp/test.yml | 8 ++++ tests/modules/diamond/blastx/main.nf | 15 ++++++ tests/modules/diamond/blastx/test.yml | 8 ++++ tests/modules/diamond/makedb/main.nf | 12 +++++ tests/modules/diamond/makedb/test.yml | 8 ++++ 16 files changed, 538 insertions(+) create mode 100644 modules/diamond/blastp/functions.nf create mode 100644 modules/diamond/blastp/main.nf create mode 100644 modules/diamond/blastp/meta.yml create mode 100644 modules/diamond/blastx/functions.nf create mode 100644 modules/diamond/blastx/main.nf create mode 100644 modules/diamond/blastx/meta.yml create mode 100644 modules/diamond/makedb/functions.nf create mode 100644 modules/diamond/makedb/main.nf create mode 100644 modules/diamond/makedb/meta.yml create mode 100644 tests/modules/diamond/blastp/main.nf create mode 100644 tests/modules/diamond/blastp/test.yml create mode 100644 tests/modules/diamond/blastx/main.nf create mode 100644 tests/modules/diamond/blastx/test.yml create mode 100644 tests/modules/diamond/makedb/main.nf create mode 100644 tests/modules/diamond/makedb/test.yml diff --git a/modules/diamond/blastp/functions.nf b/modules/diamond/blastp/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/diamond/blastp/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf new file mode 100644 index 00000000..88ace780 --- /dev/null +++ b/modules/diamond/blastp/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DIAMOND_BLASTP { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + // Dimaond is limited to v2.0.9 because there is not a + // singularity version higher than this at the current time. + conda (params.enable_conda ? "bioconda::diamond=2.0.9" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' + } else { + container "quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0" + } + + input: + tuple val(meta), path(fasta) + path db + + output: + tuple val(meta), path('*.txt'), emit: txt + path '*.version.txt' , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` + + diamond \\ + blastp \\ + --threads $task.cpus \\ + --db \$DB \\ + --query $fasta \\ + $options.args \\ + --out ${prefix}.txt + + echo \$(diamond --version 2>&1) | tail -n 1 | sed 's/^diamond version //' > ${software}.version.txt + """ +} diff --git a/modules/diamond/blastp/meta.yml b/modules/diamond/blastp/meta.yml new file mode 100644 index 00000000..b6e82f95 --- /dev/null +++ b/modules/diamond/blastp/meta.yml @@ -0,0 +1,43 @@ +name: diamond_blastp +description: Queries a DIAMOND database using blastp mode +keywords: + - fasta + - diamond + - blastp + - DNA sequence +tools: + - diamond: + description: Accelerated BLAST compatible local sequence aligner + homepage: https://github.com/bbuchfink/diamond + documentation: https://github.com/bbuchfink/diamond/wiki + tool_dev_url: https://github.com/bbuchfink/diamond + doi: "doi:10.1038/s41592-021-01101-x" + licence: ['GPL v3.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - fasta: + type: file + description: Input fasta file containing query sequences + pattern: "*.{fa,fasta}" + - db: + type: directory + description: Directory containing the protein blast database + pattern: "*" + +output: + - txt: + type: file + description: File containing blastp hits + pattern: "*.{blastp.txt}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + +authors: + - "@spficklin" diff --git a/modules/diamond/blastx/functions.nf b/modules/diamond/blastx/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/diamond/blastx/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf new file mode 100644 index 00000000..cd9e4838 --- /dev/null +++ b/modules/diamond/blastx/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DIAMOND_BLASTX { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + // Dimaond is limited to v2.0.9 because there is not a + // singularity version higher than this at the current time. + conda (params.enable_conda ? "bioconda::diamond=2.0.9" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' + } else { + container "quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0" + } + + input: + tuple val(meta), path(fasta) + path db + + output: + tuple val(meta), path('*.txt'), emit: txt + path '*.version.txt' , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` + + diamond \\ + blastx \\ + --threads $task.cpus \\ + --db \$DB \\ + --query $fasta \\ + $options.args \\ + --out ${prefix}.txt + + echo \$(diamond --version 2>&1) | tail -n 1 | sed 's/^diamond version //' > ${software}.version.txt + """ +} diff --git a/modules/diamond/blastx/meta.yml b/modules/diamond/blastx/meta.yml new file mode 100644 index 00000000..d9670bed --- /dev/null +++ b/modules/diamond/blastx/meta.yml @@ -0,0 +1,43 @@ +name: diamond_blastx +description: Queries a DIAMOND database using blastx mode +keywords: + - fasta + - diamond + - blastx + - DNA sequence +tools: + - diamond: + description: Accelerated BLAST compatible local sequence aligner + homepage: https://github.com/bbuchfink/diamond + documentation: https://github.com/bbuchfink/diamond/wiki + tool_dev_url: https://github.com/bbuchfink/diamond + doi: "doi:10.1038/s41592-021-01101-x" + licence: ['GPL v3.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - fasta: + type: file + description: Input fasta file containing query sequences + pattern: "*.{fa,fasta}" + - db: + type: directory + description: Directory containing the nucelotide blast database + pattern: "*" + +output: + - txt: + type: file + description: File containing blastx hits + pattern: "*.{blastx.txt}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + +authors: + - "@spficklin" diff --git a/modules/diamond/makedb/functions.nf b/modules/diamond/makedb/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/diamond/makedb/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/diamond/makedb/main.nf b/modules/diamond/makedb/main.nf new file mode 100644 index 00000000..3537d0aa --- /dev/null +++ b/modules/diamond/makedb/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DIAMOND_MAKEDB { + tag "$fasta" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + // Dimaond is limited to v2.0.9 because there is not a + // singularity version higher than this at the current time. + conda (params.enable_conda ? 'bioconda::diamond=2.0.9' : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' + } else { + container 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' + } + + input: + path fasta + + output: + path "${fasta}.dmnd", emit: db + path '*.version.txt', emit: version + + script: + def software = getSoftwareName(task.process) + """ + diamond \\ + makedb \\ + --threads $task.cpus \\ + --in $fasta \\ + -d $fasta \\ + $options.args + + echo \$(diamond --version 2>&1) | tail -n 1 | sed 's/^diamond version //' > ${software}.version.txt + """ +} diff --git a/modules/diamond/makedb/meta.yml b/modules/diamond/makedb/meta.yml new file mode 100644 index 00000000..edb63fab --- /dev/null +++ b/modules/diamond/makedb/meta.yml @@ -0,0 +1,34 @@ +name: diamond_makedb +description: Builds a DIAMOND database +keywords: + - fasta + - diamond + - index + - database +tools: + - diamond: + description: Accelerated BLAST compatible local sequence aligner + homepage: https://github.com/bbuchfink/diamond + documentation: https://github.com/bbuchfink/diamond/wiki + tool_dev_url: https://github.com/bbuchfink/diamond + doi: "doi:10.1038/s41592-021-01101-x" + licence: ['GPL v3.0'] + +input: + - fasta: + type: file + description: Input fasta file + pattern: "*.{fa,fasta}" + +output: + - db: + type: file + description: File of the indexed DIAMOND database + pattern: "*.{dmnd}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + +authors: + - "@spficklin" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 5307d684..74673511 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -286,6 +286,18 @@ delly/call: - modules/delly/call/** - tests/modules/delly/call/** +diamond/blastx: + - modules/diamond/blastx/** + - tests/modules/diamond/blastx/** + +diamond/blastp: + - modules/diamond/blastp/** + - tests/modules/diamond/blastp/** + +diamond/makedb: + - modules/diamond/makedb/** + - tests/modules/diamond/makedb/** + dragonflye: - modules/dragonflye/** - tests/modules/dragonflye/** diff --git a/tests/modules/diamond/blastp/main.nf b/tests/modules/diamond/blastp/main.nf new file mode 100644 index 
00000000..ab131a86 --- /dev/null +++ b/tests/modules/diamond/blastp/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) +include { DIAMOND_BLASTP } from '../../../../modules/diamond/blastp/main.nf' addParams( options: [ suffix: '.diamond_blastp' ] ) + +workflow test_diamond_blastp { + + db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + DIAMOND_MAKEDB ( db ) + DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db ) +} diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml new file mode 100644 index 00000000..ae62ea51 --- /dev/null +++ b/tests/modules/diamond/blastp/test.yml @@ -0,0 +1,8 @@ +- name: diamond blastp + command: nextflow run ./tests/modules/diamond/blastp -entry test_diamond_blastp -c tests/config/nextflow.config + tags: + - diamond + - diamond/blastp + files: + - path: ./output/diamond/test.diamond_blastp.txt + md5sum: 3ca7f6290c1d8741c573370e6f8b4db0 diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf new file mode 100644 index 00000000..c0e437d7 --- /dev/null +++ b/tests/modules/diamond/blastx/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) +include { DIAMOND_BLASTX } from '../../../../modules/diamond/blastx/main.nf' addParams( options: [ suffix: '.diamond_blastx' ] ) + +workflow test_diamond_blastx { + + db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + DIAMOND_MAKEDB ( db ) + DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db ) +} diff --git a/tests/modules/diamond/blastx/test.yml b/tests/modules/diamond/blastx/test.yml new file mode 100644 index 00000000..91a6eb4f --- /dev/null +++ b/tests/modules/diamond/blastx/test.yml @@ -0,0 +1,8 @@ +- name: diamond blastx + command: nextflow run ./tests/modules/diamond/blastx -entry test_diamond_blastx -c tests/config/nextflow.config + tags: + - diamond + - diamond/blastx + files: + - path: ./output/diamond/test.diamond_blastx.txt + md5sum: d41d8cd98f00b204e9800998ecf8427e diff --git a/tests/modules/diamond/makedb/main.nf b/tests/modules/diamond/makedb/main.nf new file mode 100644 index 00000000..bcd7691e --- /dev/null +++ b/tests/modules/diamond/makedb/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) + +workflow test_diamond_makedb { + + input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + DIAMOND_MAKEDB ( input ) +} diff --git a/tests/modules/diamond/makedb/test.yml b/tests/modules/diamond/makedb/test.yml new file mode 100644 index 00000000..335b571f --- /dev/null +++ b/tests/modules/diamond/makedb/test.yml @@ -0,0 +1,8 @@ +- name: diamond makedb test_diamond_makedb + command: nextflow run ./tests/modules/diamond/makedb -entry test_diamond_makedb -c tests/config/nextflow.config + tags: + - diamond + - diamond/makedb + files: + - path: output/diamond/genome.fasta.dmnd + md5sum: 2447fb376394c20d43ea3aad2aa5d15d From 
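// The three DIAMOND modules added above are designed to be chained: DIAMOND_MAKEDB
// builds the .dmnd index once and its db channel feeds DIAMOND_BLASTP or
// DIAMOND_BLASTX, with the report format left to options.args. A sketch based on
// the test workflows above; the '--outfmt 6' (tabular output) choice and the
// workflow name are illustrative assumptions, not part of the patch.
nextflow.enable.dsl = 2

include { DIAMOND_MAKEDB } from './modules/diamond/makedb/main.nf' addParams( options: [:] )
include { DIAMOND_BLASTX } from './modules/diamond/blastx/main.nf' addParams( options: [ args: '--outfmt 6', suffix: '.diamond_blastx' ] )

workflow diamond_annotate {
    db_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
    query    = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true)

    DIAMOND_MAKEDB ( db_fasta )                                        // builds genome.fasta.dmnd
    DIAMOND_BLASTX ( [ [ id: 'test' ], query ], DIAMOND_MAKEDB.out.db )
}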
ab67a1d41b63bf52fd7c147f7f8f6e8d167590b5 Mon Sep 17 00:00:00 2001 From: Gregor Sturm Date: Fri, 24 Sep 2021 11:01:54 +0200 Subject: [PATCH 091/314] Update fastqc to produce multi-version versions.yml (#665) * Update fastqc to produce multi-version versions.yml * Update readme and pull request template * Fix markdownlint * remove variable * Change publish dir to lowercase * Re-add getSoftwareName * Add custom pytest-workflow test to ensure versions.yml is valid * Add docstring * Remove __init__.py as it is not needed * Remove changes to README, since this part went to nf-co.re * Add NF_CORE_TEST env var * Fix editorconfig * Add additional consistency checks for versions.yml * Update multiqc module * Fix output channel --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- .github/workflows/pytest-workflow.yml | 2 +- .gitignore | 3 ++ modules/fastqc/functions.nf | 58 ++++++++++++++++----------- modules/fastqc/main.nf | 17 +++++--- modules/fastqc/meta.yml | 2 +- modules/multiqc/functions.nf | 58 ++++++++++++++++----------- modules/multiqc/main.nf | 10 +++-- modules/multiqc/meta.yml | 2 +- tests/test_versions_yml.py | 40 ++++++++++++++++++ 10 files changed, 134 insertions(+), 60 deletions(-) create mode 100644 tests/test_versions_yml.py diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 059133d6..b9f7a4e8 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -20,7 +20,7 @@ Closes #XXX - [ ] If you've added a new tool - have you followed the module conventions in the [contribution docs](https://github.com/nf-core/modules/tree/master/.github/CONTRIBUTING.md) - [ ] If necessary, include test data in your PR. - [ ] Remove all TODO statements. -- [ ] Emit the `.version.txt` file. +- [ ] Emit the `versions.yml` file. - [ ] Follow the naming conventions. - [ ] Follow the parameters requirements. - [ ] Follow the input/output options guidelines. diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 43f48c36..9cd768a8 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -89,7 +89,7 @@ jobs: # Test the module - name: Run pytest-workflow # only use one thread for pytest-workflow to avoid race condition on conda cache. - run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof + run: NF_CORE_MODULES_TEST=1 TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof - name: Upload logs on failure if: failure() diff --git a/.gitignore b/.gitignore index 71b9b179..9d982e3f 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,7 @@ output/ *.code-workspace .screenrc .*.sw? 
+__pycache__ +*.pyo +*.pyc tests/data/ diff --git a/modules/fastqc/functions.nf b/modules/fastqc/functions.nf index da9da093..85628ee0 100644 --- a/modules/fastqc/functions.nf +++ b/modules/fastqc/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/fastqc/main.nf b/modules/fastqc/main.nf index 39c327b2..88bfbf5b 100644 --- a/modules/fastqc/main.nf +++ b/modules/fastqc/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,24 +24,31 @@ process FASTQC { output: tuple val(meta), path("*.html"), emit: html tuple val(meta), path("*.zip") , emit: zip - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: // Add soft-links to original FastQs for consistent naming in pipeline - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz fastqc $options.args --threads $task.cpus ${prefix}.fastq.gz - fastqc --version | sed -e "s/FastQC v//g" > ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS """ } else { """ [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz fastqc $options.args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz - fastqc --version | sed -e "s/FastQC v//g" > ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS """ } } diff --git a/modules/fastqc/meta.yml b/modules/fastqc/meta.yml index 8eb9953d..48031356 100644 --- a/modules/fastqc/meta.yml +++ b/modules/fastqc/meta.yml @@ -43,7 +43,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@grst" diff --git a/modules/multiqc/functions.nf b/modules/multiqc/functions.nf index da9da093..85628ee0 100644 --- a/modules/multiqc/functions.nf +++ b/modules/multiqc/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/multiqc/main.nf b/modules/multiqc/main.nf index 8b6d6f0c..2e7ad932 100644 --- a/modules/multiqc/main.nf +++ b/modules/multiqc/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,12 +24,16 @@ process MULTIQC { path "*multiqc_report.html", emit: report path "*_data" , emit: data path "*_plots" , optional:true, emit: plots - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ multiqc -f $options.args . - multiqc --version | sed -e "s/multiqc, version //g" > ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + END_VERSIONS """ } diff --git a/modules/multiqc/meta.yml b/modules/multiqc/meta.yml index 532a8bb1..2d99ec0d 100644 --- a/modules/multiqc/meta.yml +++ b/modules/multiqc/meta.yml @@ -32,7 +32,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@abhi18av" - "@bunop" diff --git a/tests/test_versions_yml.py b/tests/test_versions_yml.py new file mode 100644 index 00000000..5d44c7a0 --- /dev/null +++ b/tests/test_versions_yml.py @@ -0,0 +1,40 @@ +from pathlib import Path +import pytest +import yaml +import re + + +def _get_workflow_names(): + """Get all names of all workflows which have a test.yml in the tests directory. + + To do so, recursively finds all test.yml files and parses their content. + """ + here = Path(__file__).parent.resolve() + pytest_workflow_files = here.glob("**/test.yml") + for f in pytest_workflow_files: + test_config = yaml.safe_load(f.read_text()) + for workflow in test_config: + yield workflow["name"] + + +@pytest.mark.workflow(*_get_workflow_names()) +def test_ensure_valid_version_yml(workflow_dir): + workflow_dir = Path(workflow_dir) + software_name = workflow_dir.name.split("_")[0].lower() + versions_yml = (workflow_dir / f"output/{software_name}/versions.yml").read_text() + + assert ( + "END_VERSIONS" not in versions_yml + ), "END_VERSIONS detected in versions.yml. END_VERSIONS being in the text is a sign of an ill-formatted HEREDOC" + + # Raises an exception if yaml is not valid + versions = yaml.safe_load(versions_yml) + try: + software_versions = versions[software_name.upper()] + except KeyError: + raise AssertionError("There is no entry `` in versions.yml. ") + assert len(software_versions), "There must be at least one version emitted." 
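    # Illustrative sketch (not taken from the patch itself): for the fastqc test,
    # the lookup above expects the module to have published a versions.yml shaped
    # roughly like
    #
    #   FASTQC:
    #     fastqc: 0.11.9
    #
    # i.e. a single top-level key matching the upper-cased software name, mapping
    # each tool to its version string; the loop below then checks that every
    # reported version starts with a digit.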
+    for tool, version in software_versions.items():
+        assert re.match(
+            r"^\d+.*", str(version)
+        ), f"Version number for {tool} must start with a number. "

From 3d0b87358405df71fc4214e9e9700bca7728aab7 Mon Sep 17 00:00:00 2001
From: Gregor Sturm
Date: Mon, 27 Sep 2021 08:03:30 +0200
Subject: [PATCH 092/314] Run CI only on pull-requests and on the master-branch. (#743)

* Run CI only on pull-requests and on the master-branch.
* also for the other workflows
---
 .github/workflows/code-linting.yml    | 7 ++++++-
 .github/workflows/nf-core-linting.yml | 7 ++++++-
 .github/workflows/pytest-workflow.yml | 6 +++++-
 3 files changed, 17 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/code-linting.yml b/.github/workflows/code-linting.yml
index f20e7a61..d15c4af6 100644
--- a/.github/workflows/code-linting.yml
+++ b/.github/workflows/code-linting.yml
@@ -1,5 +1,10 @@
 name: Code Linting
-on: [push, pull_request]
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+
 
 jobs:
   Markdown:
diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml
index df5ba60d..263b36b3 100644
--- a/.github/workflows/nf-core-linting.yml
+++ b/.github/workflows/nf-core-linting.yml
@@ -1,7 +1,12 @@
 name: nf-core linting
 # This workflow is triggered on pushes and PRs to the repository.
 # It runs the `nf-core lint` tests to ensure that the module code meets the nf-core guidelines
-on: [push, pull_request]
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+
 
 jobs:
   changes:
diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml
index 9cd768a8..7cbb2689 100644
--- a/.github/workflows/pytest-workflow.yml
+++ b/.github/workflows/pytest-workflow.yml
@@ -1,5 +1,9 @@
 name: Pytest-workflow
-on: [push, pull_request]
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
 
 jobs:
   changes:

From d73a988ff7279a1873ee41c9526c09a66681f75d Mon Sep 17 00:00:00 2001
From: Gregor Sturm
Date: Mon, 27 Sep 2021 10:40:50 +0200
Subject: [PATCH 093/314] Cover case when processes have been imported under different name (#744)

Co-authored-by: Harshil Patel
---
 tests/test_versions_yml.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/tests/test_versions_yml.py b/tests/test_versions_yml.py
index 5d44c7a0..c3944a0f 100644
--- a/tests/test_versions_yml.py
+++ b/tests/test_versions_yml.py
@@ -25,14 +25,12 @@ def test_ensure_valid_version_yml(workflow_dir):
 
     assert (
         "END_VERSIONS" not in versions_yml
-    ), "END_VERSIONS detected in versions.yml. END_VERSIONS being in the text is a sign of an ill-formatted HEREDOC"
+    ), "END_VERSIONS detected in versions.yml. This is a sign of an ill-formatted HEREDOC"
 
     # Raises an exception if yaml is not valid
     versions = yaml.safe_load(versions_yml)
-    try:
-        software_versions = versions[software_name.upper()]
-    except KeyError:
-        raise AssertionError("There is no entry `` in versions.yml. ")
+    assert len(versions) == 1, "The top-level of versions.yml must contain exactly one entry: the process name as dict key"
+    software_versions = next(iter(versions.values()))
     assert len(software_versions), "There must be at least one version emitted."
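    # Illustrative note on why the relaxed check above matters (the aliased process
    # name below is a hypothetical example, not from this patch): a pipeline may do
    # `include { FASTQC as FASTQC_TRIMMED }`, in which case getProcessName() emits
    # the alias and a lookup keyed on the software name ("FASTQC") would fail, while
    # asserting "exactly one top-level entry" still accepts
    #
    #   FASTQC_TRIMMED:
    #     fastqc: 0.11.9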
for tool, version in software_versions.items(): assert re.match( From 906577873b66253b0d244871bfec2eeeaff73053 Mon Sep 17 00:00:00 2001 From: Gregor Sturm Date: Mon, 27 Sep 2021 10:41:24 +0200 Subject: [PATCH 094/314] Bulk update modules to use versions.yml (#739) * New functions.nf * Convert code to create versions.yml * Update meta.yml * update output channel * Fix more meta.yml * Manually update remaining modules * remove superflous echo * Fix misformatted meta.yml files * Fix yaml, was list instead of dict * fix version for bcftools Co-authored-by: Harshil Patel --- modules/abacas/functions.nf | 58 +++++++++++-------- modules/abacas/main.nf | 9 ++- modules/abacas/meta.yml | 2 +- modules/adapterremoval/functions.nf | 58 +++++++++++-------- modules/adapterremoval/main.nf | 19 ++++-- modules/adapterremoval/meta.yml | 2 +- modules/agrvate/functions.nf | 58 +++++++++++-------- modules/agrvate/main.nf | 9 ++- modules/agrvate/meta.yml | 2 +- modules/allelecounter/functions.nf | 58 +++++++++++-------- modules/allelecounter/main.nf | 9 ++- modules/allelecounter/meta.yml | 2 +- modules/arriba/functions.nf | 58 +++++++++++-------- modules/arriba/main.nf | 9 ++- modules/arriba/meta.yml | 2 +- modules/artic/guppyplex/functions.nf | 58 +++++++++++-------- modules/artic/guppyplex/main.nf | 9 ++- modules/artic/guppyplex/meta.yml | 2 +- modules/artic/minion/functions.nf | 58 +++++++++++-------- modules/artic/minion/main.nf | 9 ++- modules/artic/minion/meta.yml | 2 +- modules/bamaligncleaner/functions.nf | 58 +++++++++++-------- modules/bamaligncleaner/main.nf | 9 ++- modules/bamaligncleaner/meta.yml | 2 +- modules/bandage/image/functions.nf | 58 +++++++++++-------- modules/bandage/image/main.nf | 9 ++- modules/bandage/image/meta.yml | 2 +- modules/bbmap/align/functions.nf | 58 +++++++++++-------- modules/bbmap/align/main.nf | 9 ++- modules/bbmap/align/meta.yml | 2 +- modules/bbmap/bbduk/functions.nf | 58 +++++++++++-------- modules/bbmap/bbduk/main.nf | 9 ++- modules/bbmap/bbduk/meta.yml | 2 +- modules/bbmap/index/functions.nf | 58 +++++++++++-------- modules/bbmap/index/main.nf | 9 ++- modules/bbmap/index/meta.yml | 2 +- modules/bcftools/concat/functions.nf | 58 +++++++++++-------- modules/bcftools/concat/main.nf | 9 ++- modules/bcftools/concat/meta.yml | 2 +- modules/bcftools/consensus/functions.nf | 58 +++++++++++-------- modules/bcftools/consensus/main.nf | 9 ++- modules/bcftools/consensus/meta.yml | 2 +- modules/bcftools/filter/functions.nf | 58 +++++++++++-------- modules/bcftools/filter/main.nf | 9 ++- modules/bcftools/filter/meta.yml | 2 +- modules/bcftools/isec/functions.nf | 58 +++++++++++-------- modules/bcftools/isec/main.nf | 9 ++- modules/bcftools/isec/meta.yml | 2 +- modules/bcftools/merge/functions.nf | 58 +++++++++++-------- modules/bcftools/merge/main.nf | 9 ++- modules/bcftools/merge/meta.yml | 2 +- modules/bcftools/mpileup/functions.nf | 58 +++++++++++-------- modules/bcftools/mpileup/main.nf | 9 ++- modules/bcftools/mpileup/meta.yml | 2 +- modules/bcftools/norm/functions.nf | 58 +++++++++++-------- modules/bcftools/norm/main.nf | 9 ++- modules/bcftools/norm/meta.yml | 2 +- modules/bcftools/query/functions.nf | 58 +++++++++++-------- modules/bcftools/query/main.nf | 9 ++- modules/bcftools/query/meta.yml | 2 +- modules/bcftools/reheader/functions.nf | 58 +++++++++++-------- modules/bcftools/reheader/main.nf | 9 ++- modules/bcftools/reheader/meta.yml | 2 +- modules/bcftools/stats/functions.nf | 58 +++++++++++-------- modules/bcftools/stats/main.nf | 9 ++- 
modules/bcftools/stats/meta.yml | 2 +- modules/bcftools/view/functions.nf | 58 +++++++++++-------- modules/bcftools/view/main.nf | 9 ++- modules/bcftools/view/meta.yml | 2 +- modules/bedtools/bamtobed/functions.nf | 58 +++++++++++-------- modules/bedtools/bamtobed/main.nf | 9 ++- modules/bedtools/bamtobed/meta.yml | 2 +- modules/bedtools/complement/functions.nf | 58 +++++++++++-------- modules/bedtools/complement/main.nf | 9 ++- modules/bedtools/complement/meta.yml | 2 +- modules/bedtools/genomecov/functions.nf | 58 +++++++++++-------- modules/bedtools/genomecov/main.nf | 14 +++-- modules/bedtools/genomecov/meta.yml | 2 +- modules/bedtools/getfasta/functions.nf | 58 +++++++++++-------- modules/bedtools/getfasta/main.nf | 9 ++- modules/bedtools/getfasta/meta.yml | 2 +- modules/bedtools/intersect/functions.nf | 58 +++++++++++-------- modules/bedtools/intersect/main.nf | 9 ++- modules/bedtools/intersect/meta.yml | 2 +- modules/bedtools/makewindows/functions.nf | 58 +++++++++++-------- modules/bedtools/makewindows/main.nf | 9 ++- modules/bedtools/makewindows/meta.yml | 2 +- modules/bedtools/maskfasta/functions.nf | 58 +++++++++++-------- modules/bedtools/maskfasta/main.nf | 9 ++- modules/bedtools/maskfasta/meta.yml | 2 +- modules/bedtools/merge/functions.nf | 58 +++++++++++-------- modules/bedtools/merge/main.nf | 9 ++- modules/bedtools/merge/meta.yml | 2 +- modules/bedtools/slop/functions.nf | 58 +++++++++++-------- modules/bedtools/slop/main.nf | 9 ++- modules/bedtools/slop/meta.yml | 2 +- modules/bedtools/sort/functions.nf | 58 +++++++++++-------- modules/bedtools/sort/main.nf | 9 ++- modules/bedtools/sort/meta.yml | 2 +- modules/bedtools/subtract/functions.nf | 58 +++++++++++-------- modules/bedtools/subtract/main.nf | 9 ++- modules/bedtools/subtract/meta.yml | 2 +- modules/bismark/align/functions.nf | 58 +++++++++++-------- modules/bismark/align/main.nf | 9 ++- modules/bismark/align/meta.yml | 2 +- modules/bismark/deduplicate/functions.nf | 58 +++++++++++-------- modules/bismark/deduplicate/main.nf | 9 ++- modules/bismark/deduplicate/meta.yml | 2 +- .../bismark/genomepreparation/functions.nf | 58 +++++++++++-------- modules/bismark/genomepreparation/main.nf | 9 ++- modules/bismark/genomepreparation/meta.yml | 2 +- .../bismark/methylationextractor/functions.nf | 58 +++++++++++-------- modules/bismark/methylationextractor/main.nf | 9 ++- modules/bismark/methylationextractor/meta.yml | 2 +- modules/bismark/report/functions.nf | 58 +++++++++++-------- modules/bismark/report/main.nf | 9 ++- modules/bismark/report/meta.yml | 2 +- modules/bismark/summary/functions.nf | 58 +++++++++++-------- modules/bismark/summary/main.nf | 9 ++- modules/bismark/summary/meta.yml | 2 +- modules/blast/blastn/functions.nf | 58 +++++++++++-------- modules/blast/blastn/main.nf | 9 ++- modules/blast/blastn/meta.yml | 2 +- modules/blast/makeblastdb/functions.nf | 58 +++++++++++-------- modules/blast/makeblastdb/main.nf | 9 ++- modules/blast/makeblastdb/meta.yml | 2 +- modules/bowtie/align/functions.nf | 58 +++++++++++-------- modules/bowtie/align/main.nf | 9 ++- modules/bowtie/align/meta.yml | 2 +- modules/bowtie/build/functions.nf | 58 +++++++++++-------- modules/bowtie/build/main.nf | 9 ++- modules/bowtie/build/meta.yml | 2 +- modules/bowtie2/align/functions.nf | 58 +++++++++++-------- modules/bowtie2/align/main.nf | 14 +++-- modules/bowtie2/align/meta.yml | 2 +- modules/bowtie2/build/functions.nf | 58 +++++++++++-------- modules/bowtie2/build/main.nf | 9 ++- modules/bowtie2/build/meta.yml | 2 +- 
modules/bwa/aln/functions.nf | 58 +++++++++++-------- modules/bwa/aln/main.nf | 14 +++-- modules/bwa/aln/meta.yml | 2 +- modules/bwa/index/functions.nf | 58 +++++++++++-------- modules/bwa/index/main.nf | 9 ++- modules/bwa/index/meta.yml | 2 +- modules/bwa/mem/functions.nf | 58 +++++++++++-------- modules/bwa/mem/main.nf | 9 ++- modules/bwa/mem/meta.yml | 2 +- modules/bwa/sampe/functions.nf | 58 +++++++++++-------- modules/bwa/sampe/main.nf | 9 ++- modules/bwa/sampe/meta.yml | 2 +- modules/bwa/samse/functions.nf | 58 +++++++++++-------- modules/bwa/samse/main.nf | 9 ++- modules/bwa/samse/meta.yml | 2 +- modules/bwamem2/index/functions.nf | 58 +++++++++++-------- modules/bwamem2/index/main.nf | 9 ++- modules/bwamem2/index/meta.yml | 2 +- modules/bwamem2/mem/functions.nf | 58 +++++++++++-------- modules/bwamem2/mem/main.nf | 9 ++- modules/bwamem2/mem/meta.yml | 2 +- modules/bwameth/align/functions.nf | 58 +++++++++++-------- modules/bwameth/align/main.nf | 9 ++- modules/bwameth/align/meta.yml | 2 +- modules/bwameth/index/functions.nf | 58 +++++++++++-------- modules/bwameth/index/main.nf | 9 ++- modules/bwameth/index/meta.yml | 2 +- modules/cat/cat/functions.nf | 58 +++++++++++-------- modules/cat/cat/main.nf | 9 ++- modules/cat/cat/meta.yml | 2 +- modules/cat/fastq/functions.nf | 58 +++++++++++-------- modules/chromap/chromap/functions.nf | 58 +++++++++++-------- modules/chromap/chromap/main.nf | 14 +++-- modules/chromap/chromap/meta.yml | 2 +- modules/chromap/index/functions.nf | 58 +++++++++++-------- modules/chromap/index/main.nf | 9 ++- modules/chromap/index/meta.yml | 2 +- modules/cnvkit/functions.nf | 58 +++++++++++-------- modules/cnvkit/main.nf | 9 ++- modules/cnvkit/meta.yml | 2 +- modules/cooler/digest/functions.nf | 58 +++++++++++-------- modules/cooler/digest/main.nf | 9 ++- modules/cooler/digest/meta.yml | 2 +- modules/cooler/dump/functions.nf | 58 +++++++++++-------- modules/cooler/dump/main.nf | 9 ++- modules/cooler/dump/meta.yml | 2 +- modules/cutadapt/functions.nf | 58 +++++++++++-------- modules/cutadapt/main.nf | 9 ++- modules/cutadapt/meta.yml | 2 +- modules/damageprofiler/functions.nf | 58 +++++++++++-------- modules/damageprofiler/main.nf | 9 ++- modules/damageprofiler/meta.yml | 2 +- modules/deeptools/computematrix/functions.nf | 58 +++++++++++-------- modules/deeptools/computematrix/main.nf | 9 ++- modules/deeptools/computematrix/meta.yml | 2 +- .../deeptools/plotfingerprint/functions.nf | 58 +++++++++++-------- modules/deeptools/plotfingerprint/main.nf | 9 ++- modules/deeptools/plotfingerprint/meta.yml | 2 +- modules/deeptools/plotheatmap/functions.nf | 58 +++++++++++-------- modules/deeptools/plotheatmap/main.nf | 9 ++- modules/deeptools/plotheatmap/meta.yml | 2 +- modules/deeptools/plotprofile/functions.nf | 58 +++++++++++-------- modules/deeptools/plotprofile/main.nf | 9 ++- modules/deeptools/plotprofile/meta.yml | 2 +- modules/delly/call/functions.nf | 58 +++++++++++-------- modules/delly/call/main.nf | 9 ++- modules/delly/call/meta.yml | 2 +- modules/diamond/blastp/functions.nf | 58 +++++++++++-------- modules/diamond/blastp/main.nf | 9 ++- modules/diamond/blastp/meta.yml | 2 +- modules/diamond/blastx/functions.nf | 58 +++++++++++-------- modules/diamond/blastx/main.nf | 9 ++- modules/diamond/blastx/meta.yml | 2 +- modules/diamond/makedb/functions.nf | 58 +++++++++++-------- modules/diamond/makedb/main.nf | 9 ++- modules/diamond/makedb/meta.yml | 2 +- modules/dragonflye/functions.nf | 58 +++++++++++-------- modules/dragonflye/main.nf | 9 ++- 
modules/dragonflye/meta.yml | 2 +- modules/dshbio/exportsegments/functions.nf | 58 +++++++++++-------- modules/dshbio/exportsegments/main.nf | 9 ++- modules/dshbio/exportsegments/meta.yml | 2 +- modules/dshbio/filterbed/functions.nf | 58 +++++++++++-------- modules/dshbio/filterbed/main.nf | 9 ++- modules/dshbio/filterbed/meta.yml | 2 +- modules/dshbio/filtergff3/functions.nf | 58 +++++++++++-------- modules/dshbio/filtergff3/main.nf | 9 ++- modules/dshbio/filtergff3/meta.yml | 2 +- modules/dshbio/splitbed/functions.nf | 58 +++++++++++-------- modules/dshbio/splitbed/main.nf | 9 ++- modules/dshbio/splitbed/meta.yml | 2 +- modules/dshbio/splitgff3/functions.nf | 58 +++++++++++-------- modules/dshbio/splitgff3/main.nf | 9 ++- modules/dshbio/splitgff3/meta.yml | 2 +- modules/ensemblvep/functions.nf | 58 +++++++++++-------- modules/ensemblvep/main.nf | 9 ++- modules/ensemblvep/meta.yml | 2 +- modules/expansionhunter/functions.nf | 58 +++++++++++-------- modules/expansionhunter/main.nf | 9 ++- modules/expansionhunter/meta.yml | 2 +- modules/fastani/functions.nf | 58 +++++++++++-------- modules/fastani/main.nf | 14 +++-- modules/fastani/meta.yml | 2 +- modules/fastp/functions.nf | 58 +++++++++++-------- modules/fastp/main.nf | 14 +++-- modules/fastp/meta.yml | 2 +- modules/fasttree/functions.nf | 58 +++++++++++-------- modules/fasttree/main.nf | 9 ++- modules/fasttree/meta.yml | 2 +- .../callmolecularconsensusreads/functions.nf | 58 +++++++++++-------- .../fgbio/callmolecularconsensusreads/main.nf | 9 ++- .../callmolecularconsensusreads/meta.yml | 2 +- modules/fgbio/sortbam/functions.nf | 58 +++++++++++-------- modules/fgbio/sortbam/main.nf | 9 ++- modules/fgbio/sortbam/meta.yml | 2 +- modules/flash/functions.nf | 58 +++++++++++-------- modules/flash/main.nf | 9 ++- modules/flash/meta.yml | 2 +- modules/gatk4/applybqsr/functions.nf | 58 +++++++++++-------- modules/gatk4/applybqsr/main.nf | 9 ++- modules/gatk4/applybqsr/meta.yml | 2 +- modules/gatk4/baserecalibrator/functions.nf | 58 +++++++++++-------- modules/gatk4/baserecalibrator/main.nf | 9 ++- modules/gatk4/baserecalibrator/meta.yml | 2 +- modules/gatk4/bedtointervallist/functions.nf | 58 +++++++++++-------- modules/gatk4/bedtointervallist/main.nf | 9 ++- modules/gatk4/bedtointervallist/meta.yml | 2 +- .../createsequencedictionary/functions.nf | 58 +++++++++++-------- .../gatk4/createsequencedictionary/main.nf | 9 ++- .../gatk4/createsequencedictionary/meta.yml | 2 +- modules/gatk4/fastqtosam/functions.nf | 58 +++++++++++-------- modules/gatk4/fastqtosam/main.nf | 9 ++- modules/gatk4/fastqtosam/meta.yml | 2 +- modules/gatk4/getpileupsummaries/functions.nf | 58 +++++++++++-------- modules/gatk4/getpileupsummaries/main.nf | 9 ++- modules/gatk4/getpileupsummaries/meta.yml | 2 +- modules/gatk4/haplotypecaller/functions.nf | 58 +++++++++++-------- modules/gatk4/haplotypecaller/main.nf | 9 ++- modules/gatk4/haplotypecaller/meta.yml | 2 +- modules/gatk4/intervallisttools/functions.nf | 58 +++++++++++-------- modules/gatk4/intervallisttools/main.nf | 9 ++- modules/gatk4/intervallisttools/meta.yml | 2 +- modules/gatk4/markduplicates/functions.nf | 58 +++++++++++-------- modules/gatk4/markduplicates/main.nf | 9 ++- modules/gatk4/markduplicates/meta.yml | 2 +- modules/gatk4/mergebamalignment/functions.nf | 58 +++++++++++-------- modules/gatk4/mergebamalignment/main.nf | 9 ++- modules/gatk4/mergebamalignment/meta.yml | 2 +- modules/gatk4/mergevcfs/functions.nf | 58 +++++++++++-------- modules/gatk4/mergevcfs/main.nf | 9 ++- 
modules/gatk4/mergevcfs/meta.yml | 2 +- modules/gatk4/mutect2/functions.nf | 58 +++++++++++-------- modules/gatk4/mutect2/main.nf | 9 ++- modules/gatk4/mutect2/meta.yml | 2 +- modules/gatk4/revertsam/functions.nf | 58 +++++++++++-------- modules/gatk4/revertsam/main.nf | 9 ++- modules/gatk4/revertsam/meta.yml | 2 +- modules/gatk4/samtofastq/functions.nf | 58 +++++++++++-------- modules/gatk4/samtofastq/main.nf | 9 ++- modules/gatk4/samtofastq/meta.yml | 2 +- modules/gatk4/splitncigarreads/functions.nf | 58 +++++++++++-------- modules/gatk4/splitncigarreads/main.nf | 9 ++- modules/gatk4/splitncigarreads/meta.yml | 2 +- modules/gatk4/variantfiltration/functions.nf | 58 +++++++++++-------- modules/gatk4/variantfiltration/main.nf | 9 ++- modules/gatk4/variantfiltration/meta.yml | 2 +- modules/genmap/index/functions.nf | 58 +++++++++++-------- modules/genmap/index/main.nf | 9 ++- modules/genmap/index/meta.yml | 2 +- modules/genmap/mappability/functions.nf | 58 +++++++++++-------- modules/genmap/mappability/main.nf | 9 ++- modules/genmap/mappability/meta.yml | 2 +- modules/gffread/functions.nf | 58 +++++++++++-------- modules/gffread/main.nf | 9 ++- modules/gffread/meta.yml | 2 +- modules/glnexus/functions.nf | 58 +++++++++++-------- modules/glnexus/main.nf | 9 ++- modules/glnexus/meta.yml | 2 +- modules/graphmap2/align/functions.nf | 58 +++++++++++-------- modules/graphmap2/align/main.nf | 9 ++- modules/graphmap2/align/meta.yml | 2 +- modules/graphmap2/index/functions.nf | 58 +++++++++++-------- modules/graphmap2/index/main.nf | 9 ++- modules/graphmap2/index/meta.yml | 2 +- modules/gubbins/functions.nf | 58 +++++++++++-------- modules/gubbins/main.nf | 9 ++- modules/gubbins/meta.yml | 2 +- modules/gunzip/functions.nf | 58 +++++++++++-------- modules/gunzip/main.nf | 9 ++- modules/gunzip/meta.yml | 2 +- modules/hifiasm/functions.nf | 58 +++++++++++-------- modules/hifiasm/main.nf | 14 +++-- modules/hifiasm/meta.yml | 2 +- modules/hisat2/align/functions.nf | 58 +++++++++++-------- modules/hisat2/align/main.nf | 14 +++-- modules/hisat2/align/meta.yml | 2 +- modules/hisat2/build/functions.nf | 58 +++++++++++-------- modules/hisat2/build/main.nf | 9 ++- modules/hisat2/build/meta.yml | 2 +- .../hisat2/extractsplicesites/functions.nf | 58 +++++++++++-------- modules/hisat2/extractsplicesites/main.nf | 9 ++- modules/hisat2/extractsplicesites/meta.yml | 2 +- modules/hmmer/hmmalign/functions.nf | 58 +++++++++++-------- modules/hmmer/hmmalign/main.nf | 9 ++- modules/hmmer/hmmalign/meta.yml | 2 +- modules/homer/annotatepeaks/functions.nf | 58 +++++++++++-------- modules/homer/annotatepeaks/main.nf | 9 ++- modules/homer/annotatepeaks/meta.yml | 2 +- modules/homer/findpeaks/functions.nf | 58 +++++++++++-------- modules/homer/findpeaks/main.nf | 9 ++- modules/homer/findpeaks/meta.yml | 2 +- modules/homer/maketagdirectory/functions.nf | 58 +++++++++++-------- modules/homer/maketagdirectory/main.nf | 9 ++- modules/homer/maketagdirectory/meta.yml | 2 +- modules/homer/makeucscfile/functions.nf | 58 +++++++++++-------- modules/homer/makeucscfile/main.nf | 9 ++- modules/homer/makeucscfile/meta.yml | 2 +- modules/iqtree/functions.nf | 58 +++++++++++-------- modules/iqtree/main.nf | 9 ++- modules/iqtree/meta.yml | 2 +- modules/ivar/consensus/functions.nf | 58 +++++++++++-------- modules/ivar/consensus/main.nf | 9 ++- modules/ivar/consensus/meta.yml | 2 +- modules/ivar/trim/functions.nf | 58 +++++++++++-------- modules/ivar/trim/main.nf | 9 ++- modules/ivar/trim/meta.yml | 2 +- modules/ivar/variants/functions.nf | 58 
+++++++++++-------- modules/ivar/variants/main.nf | 9 ++- modules/ivar/variants/meta.yml | 2 +- modules/kallisto/index/functions.nf | 58 +++++++++++-------- modules/kallisto/index/main.nf | 9 ++- modules/kallisto/index/meta.yml | 2 +- modules/kallistobustools/count/functions.nf | 58 +++++++++++-------- modules/kallistobustools/count/main.nf | 9 ++- modules/kallistobustools/count/meta.yml | 2 +- modules/kallistobustools/ref/functions.nf | 58 +++++++++++-------- modules/kallistobustools/ref/main.nf | 14 +++-- modules/kallistobustools/ref/meta.yml | 2 +- modules/kleborate/functions.nf | 58 +++++++++++-------- modules/kleborate/main.nf | 9 ++- modules/kleborate/meta.yml | 2 +- modules/kraken2/kraken2/functions.nf | 58 +++++++++++-------- modules/kraken2/kraken2/main.nf | 9 ++- modules/kraken2/kraken2/meta.yml | 2 +- modules/last/dotplot/functions.nf | 58 +++++++++++-------- modules/last/dotplot/main.nf | 9 ++- modules/last/dotplot/meta.yml | 2 +- modules/last/lastal/functions.nf | 58 +++++++++++-------- modules/last/lastal/main.nf | 9 ++- modules/last/lastal/meta.yml | 2 +- modules/last/lastdb/functions.nf | 58 +++++++++++-------- modules/last/lastdb/main.nf | 9 ++- modules/last/lastdb/meta.yml | 2 +- modules/last/mafconvert/functions.nf | 58 +++++++++++-------- modules/last/mafconvert/main.nf | 9 ++- modules/last/mafconvert/meta.yml | 2 +- modules/last/mafswap/functions.nf | 58 +++++++++++-------- modules/last/mafswap/main.nf | 9 ++- modules/last/mafswap/meta.yml | 2 +- modules/last/postmask/functions.nf | 58 +++++++++++-------- modules/last/postmask/main.nf | 9 ++- modules/last/postmask/meta.yml | 2 +- modules/last/split/functions.nf | 58 +++++++++++-------- modules/last/split/main.nf | 9 ++- modules/last/split/meta.yml | 2 +- modules/last/train/functions.nf | 58 +++++++++++-------- modules/last/train/main.nf | 9 ++- modules/last/train/meta.yml | 2 +- modules/lib/functions.nf | 58 +++++++++++-------- modules/lofreq/call/functions.nf | 58 +++++++++++-------- modules/lofreq/call/main.nf | 9 ++- modules/lofreq/call/meta.yml | 2 +- modules/lofreq/callparallel/functions.nf | 58 +++++++++++-------- modules/lofreq/callparallel/main.nf | 9 ++- modules/lofreq/callparallel/meta.yml | 2 +- modules/lofreq/filter/functions.nf | 58 +++++++++++-------- modules/lofreq/filter/main.nf | 9 ++- modules/lofreq/filter/meta.yml | 2 +- modules/lofreq/indelqual/functions.nf | 58 +++++++++++-------- modules/lofreq/indelqual/main.nf | 9 ++- modules/lofreq/indelqual/meta.yml | 2 +- modules/macs2/callpeak/functions.nf | 58 +++++++++++-------- modules/macs2/callpeak/main.nf | 9 ++- modules/malt/build/functions.nf | 58 +++++++++++-------- modules/malt/build/main.nf | 9 ++- modules/malt/build/meta.yml | 2 +- modules/malt/run/functions.nf | 58 +++++++++++-------- modules/malt/run/main.nf | 9 ++- modules/malt/run/meta.yml | 2 +- modules/maltextract/functions.nf | 58 +++++++++++-------- modules/maltextract/main.nf | 9 ++- modules/maltextract/meta.yml | 2 +- modules/mash/sketch/functions.nf | 58 +++++++++++-------- modules/mash/sketch/main.nf | 9 ++- modules/metaphlan3/functions.nf | 58 +++++++++++-------- modules/metaphlan3/main.nf | 9 ++- modules/metaphlan3/meta.yml | 2 +- modules/methyldackel/extract/functions.nf | 58 +++++++++++-------- modules/methyldackel/extract/main.nf | 9 ++- modules/methyldackel/extract/meta.yml | 2 +- modules/methyldackel/mbias/functions.nf | 58 +++++++++++-------- modules/methyldackel/mbias/main.nf | 9 ++- modules/methyldackel/mbias/meta.yml | 2 +- modules/minia/functions.nf | 58 
+++++++++++-------- modules/minia/main.nf | 9 ++- modules/minia/meta.yml | 2 +- modules/minimap2/align/functions.nf | 58 +++++++++++-------- modules/minimap2/align/main.nf | 9 ++- modules/minimap2/align/meta.yml | 2 +- modules/minimap2/index/functions.nf | 58 +++++++++++-------- modules/minimap2/index/main.nf | 9 ++- modules/minimap2/index/meta.yml | 2 +- modules/mosdepth/functions.nf | 58 +++++++++++-------- modules/mosdepth/main.nf | 9 ++- modules/mosdepth/meta.yml | 2 +- modules/msisensor/msi/functions.nf | 58 +++++++++++-------- modules/msisensor/msi/main.nf | 9 ++- modules/msisensor/msi/meta.yml | 2 +- modules/msisensor/scan/functions.nf | 58 +++++++++++-------- modules/msisensor/scan/main.nf | 9 ++- modules/msisensor/scan/meta.yml | 2 +- modules/muscle/functions.nf | 58 +++++++++++-------- modules/muscle/main.nf | 9 ++- modules/muscle/meta.yml | 2 +- modules/nanolyse/functions.nf | 58 +++++++++++-------- modules/nanolyse/main.nf | 9 ++- modules/nanolyse/meta.yml | 2 +- modules/nanoplot/functions.nf | 58 +++++++++++-------- modules/nanoplot/main.nf | 9 ++- modules/nanoplot/meta.yml | 2 +- modules/nextclade/functions.nf | 58 +++++++++++-------- modules/nextclade/main.nf | 9 ++- modules/nextclade/meta.yml | 2 +- modules/optitype/functions.nf | 58 +++++++++++-------- modules/optitype/main.nf | 9 ++- modules/optitype/meta.yml | 2 +- modules/pairix/functions.nf | 58 +++++++++++-------- modules/pairix/main.nf | 9 ++- modules/pairix/meta.yml | 2 +- modules/pairtools/dedup/functions.nf | 58 +++++++++++-------- modules/pairtools/dedup/main.nf | 9 ++- modules/pairtools/dedup/meta.yml | 2 +- modules/pairtools/flip/functions.nf | 58 +++++++++++-------- modules/pairtools/flip/main.nf | 9 ++- modules/pairtools/flip/meta.yml | 2 +- modules/pairtools/parse/functions.nf | 58 +++++++++++-------- modules/pairtools/parse/main.nf | 9 ++- modules/pairtools/parse/meta.yml | 2 +- modules/pairtools/restrict/functions.nf | 58 +++++++++++-------- modules/pairtools/restrict/main.nf | 9 ++- modules/pairtools/restrict/meta.yml | 2 +- modules/pairtools/select/functions.nf | 58 +++++++++++-------- modules/pairtools/select/main.nf | 9 ++- modules/pairtools/select/meta.yml | 2 +- modules/pairtools/sort/functions.nf | 58 +++++++++++-------- modules/pairtools/sort/main.nf | 9 ++- modules/pairtools/sort/meta.yml | 2 +- modules/pangolin/functions.nf | 58 +++++++++++-------- modules/pangolin/main.nf | 9 ++- modules/pangolin/meta.yml | 2 +- modules/pbccs/functions.nf | 58 +++++++++++-------- modules/pbccs/main.nf | 9 ++- modules/pbccs/meta.yml | 2 +- modules/phantompeakqualtools/functions.nf | 58 +++++++++++-------- modules/phantompeakqualtools/main.nf | 9 ++- .../collectmultiplemetrics/functions.nf | 58 +++++++++++-------- modules/picard/collectmultiplemetrics/main.nf | 9 ++- .../picard/collectmultiplemetrics/meta.yml | 2 +- modules/picard/collectwgsmetrics/functions.nf | 58 +++++++++++-------- modules/picard/collectwgsmetrics/main.nf | 9 ++- modules/picard/collectwgsmetrics/meta.yml | 2 +- modules/picard/filtersamreads/functions.nf | 58 +++++++++++-------- modules/picard/filtersamreads/main.nf | 14 +++-- modules/picard/filtersamreads/meta.yml | 2 +- modules/picard/markduplicates/functions.nf | 58 +++++++++++-------- modules/picard/markduplicates/main.nf | 9 ++- modules/picard/markduplicates/meta.yml | 2 +- modules/picard/mergesamfiles/functions.nf | 58 +++++++++++-------- modules/picard/mergesamfiles/main.nf | 14 +++-- modules/picard/mergesamfiles/meta.yml | 2 +- modules/picard/sortsam/functions.nf | 58 
+++++++++++-------- modules/picard/sortsam/main.nf | 9 ++- modules/picard/sortsam/meta.yml | 2 +- modules/plasmidid/functions.nf | 58 +++++++++++-------- modules/plasmidid/main.nf | 9 ++- modules/plasmidid/meta.yml | 2 +- modules/plink/vcf/functions.nf | 58 +++++++++++-------- modules/plink/vcf/main.nf | 9 ++- modules/plink/vcf/meta.yml | 2 +- modules/preseq/lcextrap/functions.nf | 58 +++++++++++-------- modules/preseq/lcextrap/main.nf | 9 ++- modules/preseq/lcextrap/meta.yml | 2 +- modules/prodigal/functions.nf | 58 +++++++++++-------- modules/prodigal/main.nf | 9 ++- modules/prodigal/meta.yml | 2 +- modules/prokka/functions.nf | 58 +++++++++++-------- modules/prokka/main.nf | 9 ++- modules/prokka/meta.yml | 2 +- modules/pycoqc/functions.nf | 58 +++++++++++-------- modules/pycoqc/main.nf | 9 ++- modules/pycoqc/meta.yml | 2 +- modules/pydamage/analyze/functions.nf | 58 +++++++++++-------- modules/pydamage/analyze/main.nf | 9 ++- modules/pydamage/analyze/meta.yml | 2 +- modules/pydamage/filter/functions.nf | 58 +++++++++++-------- modules/pydamage/filter/main.nf | 9 ++- modules/pydamage/filter/meta.yml | 2 +- modules/qcat/functions.nf | 58 +++++++++++-------- modules/qcat/main.nf | 9 ++- modules/qcat/meta.yml | 2 +- modules/qualimap/bamqc/functions.nf | 58 +++++++++++-------- modules/qualimap/bamqc/main.nf | 9 ++- modules/qualimap/bamqc/meta.yml | 2 +- modules/qualimap/rnaseq/functions.nf | 58 +++++++++++-------- modules/qualimap/rnaseq/main.nf | 9 ++- modules/quast/functions.nf | 58 +++++++++++-------- modules/quast/main.nf | 9 ++- modules/quast/meta.yml | 2 +- modules/rapidnj/functions.nf | 58 +++++++++++-------- modules/rapidnj/main.nf | 9 ++- modules/rapidnj/meta.yml | 2 +- modules/rasusa/functions.nf | 58 +++++++++++-------- modules/rasusa/main.nf | 9 ++- modules/rasusa/meta.yml | 2 +- modules/raxmlng/functions.nf | 58 +++++++++++-------- modules/raxmlng/main.nf | 9 ++- modules/raxmlng/meta.yml | 2 +- modules/rsem/calculateexpression/functions.nf | 58 +++++++++++-------- modules/rsem/calculateexpression/main.nf | 9 ++- modules/rsem/calculateexpression/meta.yml | 2 +- modules/rsem/preparereference/functions.nf | 58 +++++++++++-------- modules/rsem/preparereference/main.nf | 14 +++-- modules/rsem/preparereference/meta.yml | 2 +- modules/rseqc/bamstat/functions.nf | 58 +++++++++++-------- modules/rseqc/bamstat/main.nf | 9 ++- modules/rseqc/bamstat/meta.yml | 2 +- modules/rseqc/inferexperiment/functions.nf | 58 +++++++++++-------- modules/rseqc/inferexperiment/main.nf | 9 ++- modules/rseqc/inferexperiment/meta.yml | 2 +- modules/rseqc/innerdistance/functions.nf | 58 +++++++++++-------- modules/rseqc/innerdistance/main.nf | 14 +++-- modules/rseqc/innerdistance/meta.yml | 2 +- modules/rseqc/junctionannotation/functions.nf | 58 +++++++++++-------- modules/rseqc/junctionannotation/main.nf | 9 ++- modules/rseqc/junctionannotation/meta.yml | 2 +- modules/rseqc/junctionsaturation/functions.nf | 58 +++++++++++-------- modules/rseqc/junctionsaturation/main.nf | 9 ++- modules/rseqc/junctionsaturation/meta.yml | 2 +- modules/rseqc/readdistribution/functions.nf | 58 +++++++++++-------- modules/rseqc/readdistribution/main.nf | 9 ++- modules/rseqc/readdistribution/meta.yml | 2 +- modules/rseqc/readduplication/functions.nf | 58 +++++++++++-------- modules/rseqc/readduplication/main.nf | 9 ++- modules/rseqc/readduplication/meta.yml | 2 +- modules/salmon/index/functions.nf | 58 +++++++++++-------- modules/salmon/index/main.nf | 9 ++- modules/salmon/index/meta.yml | 2 +- modules/salmon/quant/functions.nf | 
58 +++++++++++-------- modules/salmon/quant/main.nf | 9 ++- modules/salmon/quant/meta.yml | 2 +- modules/samtools/ampliconclip/functions.nf | 58 +++++++++++-------- modules/samtools/ampliconclip/main.nf | 9 ++- modules/samtools/ampliconclip/meta.yml | 2 +- modules/samtools/faidx/functions.nf | 58 +++++++++++-------- modules/samtools/faidx/main.nf | 9 ++- modules/samtools/faidx/meta.yml | 2 +- modules/samtools/fastq/functions.nf | 58 +++++++++++-------- modules/samtools/fastq/main.nf | 9 ++- modules/samtools/fastq/meta.yml | 2 +- modules/samtools/flagstat/functions.nf | 58 +++++++++++-------- modules/samtools/flagstat/main.nf | 9 ++- modules/samtools/flagstat/meta.yml | 2 +- modules/samtools/idxstats/functions.nf | 58 +++++++++++-------- modules/samtools/idxstats/main.nf | 9 ++- modules/samtools/idxstats/meta.yml | 2 +- modules/samtools/index/functions.nf | 58 +++++++++++-------- modules/samtools/index/main.nf | 9 ++- modules/samtools/index/meta.yml | 2 +- modules/samtools/merge/functions.nf | 58 +++++++++++-------- modules/samtools/merge/main.nf | 9 ++- modules/samtools/merge/meta.yml | 2 +- modules/samtools/mpileup/functions.nf | 58 +++++++++++-------- modules/samtools/mpileup/main.nf | 9 ++- modules/samtools/mpileup/meta.yml | 2 +- modules/samtools/sort/functions.nf | 58 +++++++++++-------- modules/samtools/sort/main.nf | 9 ++- modules/samtools/sort/meta.yml | 2 +- modules/samtools/stats/functions.nf | 58 +++++++++++-------- modules/samtools/stats/main.nf | 9 ++- modules/samtools/stats/meta.yml | 2 +- modules/samtools/view/functions.nf | 58 +++++++++++-------- modules/samtools/view/main.nf | 9 ++- modules/samtools/view/meta.yml | 2 +- modules/seacr/callpeak/functions.nf | 58 +++++++++++-------- modules/seacr/callpeak/main.nf | 9 ++- modules/seacr/callpeak/meta.yml | 2 +- modules/seqkit/split2/functions.nf | 58 +++++++++++-------- modules/seqkit/split2/main.nf | 14 +++-- modules/seqkit/split2/meta.yml | 2 +- modules/seqtk/sample/functions.nf | 58 +++++++++++-------- modules/seqtk/sample/main.nf | 14 +++-- modules/seqtk/sample/meta.yml | 2 +- modules/seqtk/subseq/functions.nf | 58 +++++++++++-------- modules/seqtk/subseq/main.nf | 9 ++- modules/seqtk/subseq/meta.yml | 2 +- modules/sequenzautils/bam2seqz/functions.nf | 58 +++++++++++-------- modules/sequenzautils/bam2seqz/main.nf | 9 ++- modules/sequenzautils/bam2seqz/meta.yml | 2 +- modules/sequenzautils/gcwiggle/functions.nf | 58 +++++++++++-------- modules/sequenzautils/gcwiggle/main.nf | 9 ++- modules/sequenzautils/gcwiggle/meta.yml | 2 +- modules/seqwish/induce/functions.nf | 58 +++++++++++-------- modules/seqwish/induce/main.nf | 9 ++- modules/seqwish/induce/meta.yml | 2 +- modules/shovill/functions.nf | 58 +++++++++++-------- modules/shovill/main.nf | 9 ++- modules/shovill/meta.yml | 2 +- modules/snpdists/functions.nf | 58 +++++++++++-------- modules/snpdists/main.nf | 9 ++- modules/snpdists/meta.yml | 2 +- modules/snpeff/functions.nf | 58 +++++++++++-------- modules/snpeff/main.nf | 9 ++- modules/snpeff/meta.yml | 2 +- modules/snpsites/functions.nf | 58 +++++++++++-------- modules/snpsites/main.nf | 9 ++- modules/snpsites/meta.yml | 2 +- modules/sortmerna/functions.nf | 58 +++++++++++-------- modules/sortmerna/main.nf | 14 +++-- modules/spades/functions.nf | 58 +++++++++++-------- modules/spades/main.nf | 9 ++- modules/spades/meta.yml | 2 +- modules/staphopiasccmec/functions.nf | 58 +++++++++++-------- modules/staphopiasccmec/main.nf | 9 ++- modules/staphopiasccmec/meta.yml | 2 +- modules/star/align/functions.nf | 58 
+++++++++++-------- modules/star/align/main.nf | 9 ++- modules/star/align/meta.yml | 2 +- modules/star/genomegenerate/functions.nf | 58 +++++++++++-------- modules/star/genomegenerate/main.nf | 14 +++-- modules/star/genomegenerate/meta.yml | 2 +- modules/strelka/germline/functions.nf | 58 +++++++++++-------- modules/strelka/germline/main.nf | 9 ++- modules/strelka/germline/meta.yml | 2 +- modules/stringtie/merge/functions.nf | 58 +++++++++++-------- modules/stringtie/merge/main.nf | 9 ++- modules/stringtie/stringtie/functions.nf | 58 +++++++++++-------- modules/stringtie/stringtie/main.nf | 9 ++- modules/stringtie/stringtie/meta.yml | 2 +- modules/subread/featurecounts/functions.nf | 58 +++++++++++-------- modules/subread/featurecounts/main.nf | 9 ++- modules/subread/featurecounts/meta.yml | 2 +- modules/tabix/bgzip/functions.nf | 58 +++++++++++-------- modules/tabix/bgzip/main.nf | 9 ++- modules/tabix/bgzip/meta.yml | 2 +- modules/tabix/bgziptabix/functions.nf | 58 +++++++++++-------- modules/tabix/bgziptabix/main.nf | 9 ++- modules/tabix/bgziptabix/meta.yml | 2 +- modules/tabix/tabix/functions.nf | 58 +++++++++++-------- modules/tabix/tabix/main.nf | 9 ++- modules/tabix/tabix/meta.yml | 2 +- modules/tiddit/sv/functions.nf | 58 +++++++++++-------- modules/tiddit/sv/main.nf | 9 ++- modules/tiddit/sv/meta.yml | 2 +- modules/trimgalore/functions.nf | 58 +++++++++++-------- modules/trimgalore/main.nf | 14 +++-- modules/trimgalore/meta.yml | 2 +- modules/ucsc/bed12tobigbed/functions.nf | 58 +++++++++++-------- modules/ucsc/bed12tobigbed/main.nf | 9 ++- modules/ucsc/bed12tobigbed/meta.yml | 2 +- modules/ucsc/bedclip/functions.nf | 58 +++++++++++-------- modules/ucsc/bedclip/main.nf | 9 ++- modules/ucsc/bedclip/meta.yml | 2 +- modules/ucsc/bedgraphtobigwig/functions.nf | 58 +++++++++++-------- modules/ucsc/bedgraphtobigwig/main.nf | 9 ++- modules/ucsc/bedgraphtobigwig/meta.yml | 2 +- .../ucsc/bigwigaverageoverbed/functions.nf | 58 +++++++++++-------- modules/ucsc/bigwigaverageoverbed/main.nf | 9 ++- modules/ucsc/bigwigaverageoverbed/meta.yml | 2 +- modules/ucsc/wigtobigwig/functions.nf | 58 +++++++++++-------- modules/ucsc/wigtobigwig/main.nf | 9 ++- modules/ucsc/wigtobigwig/meta.yml | 2 +- modules/umitools/dedup/functions.nf | 58 +++++++++++-------- modules/umitools/dedup/main.nf | 9 ++- modules/umitools/extract/functions.nf | 58 +++++++++++-------- modules/umitools/extract/main.nf | 14 +++-- modules/unicycler/functions.nf | 58 +++++++++++-------- modules/unicycler/main.nf | 9 ++- modules/unicycler/meta.yml | 4 +- modules/untar/functions.nf | 58 +++++++++++-------- modules/untar/main.nf | 9 ++- modules/untar/meta.yml | 2 +- modules/unzip/functions.nf | 58 +++++++++++-------- modules/unzip/main.nf | 9 ++- modules/unzip/meta.yml | 2 +- modules/variantbam/functions.nf | 58 +++++++++++-------- modules/variantbam/main.nf | 9 ++- modules/variantbam/meta.yml | 2 +- modules/vcftools/functions.nf | 58 +++++++++++-------- modules/vcftools/main.nf | 9 ++- modules/vcftools/meta.yml | 2 +- modules/yara/index/functions.nf | 58 +++++++++++-------- modules/yara/index/main.nf | 9 ++- modules/yara/index/meta.yml | 2 +- modules/yara/mapper/functions.nf | 58 +++++++++++-------- modules/yara/mapper/main.nf | 14 +++-- modules/yara/mapper/meta.yml | 2 +- 744 files changed, 10399 insertions(+), 7063 deletions(-) diff --git a/modules/abacas/functions.nf b/modules/abacas/functions.nf index da9da093..85628ee0 100644 --- a/modules/abacas/functions.nf +++ b/modules/abacas/functions.nf @@ -9,6 +9,13 @@ def 
getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/abacas/main.nf b/modules/abacas/main.nf index 6ec65ea2..0e46f854 100644 --- a/modules/abacas/main.nf +++ b/modules/abacas/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process ABACAS { output: tuple val(meta), path('*.abacas*'), emit: results - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -40,6 +40,9 @@ process ABACAS { mv nucmer.filtered.delta ${prefix}.abacas.nucmer.filtered.delta mv nucmer.tiling ${prefix}.abacas.nucmer.tiling mv unused_contigs.out ${prefix}.abacas.unused.contigs.out - echo \$(abacas.pl -v 2>&1) | sed 's/^.*ABACAS.//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + 
${getSoftwareName(task.process)}: \$(abacas.pl -v 2>&1 | sed 's/^.*ABACAS.//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/abacas/meta.yml b/modules/abacas/meta.yml index d60afee0..d8c45628 100644 --- a/modules/abacas/meta.yml +++ b/modules/abacas/meta.yml @@ -51,7 +51,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/adapterremoval/functions.nf b/modules/adapterremoval/functions.nf index da9da093..85628ee0 100644 --- a/modules/adapterremoval/functions.nf +++ b/modules/adapterremoval/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index 08b1b8bf..cbf0957a 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -1,4 +1,4 @@ -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process ADAPTERREMOVAL { output: tuple val(meta), path('*.fastq.gz'), emit: reads tuple val(meta), path('*.log') , emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,7 +41,10 @@ process ADAPTERREMOVAL { --seed 42 \\ --gzip \\ - AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + END_VERSIONS """ } else if (!meta.single_end && !meta.collapse) { """ @@ -57,7 +60,10 @@ process ADAPTERREMOVAL { --seed 42 \\ --gzip \\ - AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + END_VERSIONS """ } else { """ @@ -73,7 +79,10 @@ process ADAPTERREMOVAL { --gzip \\ cat *.collapsed.gz *.collapsed.truncated.gz > ${prefix}.merged.fastq.gz - AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. 
//g") + END_VERSIONS """ } diff --git a/modules/adapterremoval/meta.yml b/modules/adapterremoval/meta.yml index 15e0a201..6282436a 100644 --- a/modules/adapterremoval/meta.yml +++ b/modules/adapterremoval/meta.yml @@ -44,7 +44,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@maxibor" diff --git a/modules/agrvate/functions.nf b/modules/agrvate/functions.nf index da9da093..85628ee0 100644 --- a/modules/agrvate/functions.nf +++ b/modules/agrvate/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index 8f504927..44ec0825 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process AGRVATE { output: tuple val(meta), path("${fasta.baseName}-results/${fasta.baseName}-summary.tab"), emit: summary path "${fasta.baseName}-results" , emit: results_dir - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process AGRVATE { $options.args \\ -i $fasta - echo \$(agrvate -v 2>&1) | sed 's/agrvate //;' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(agrvate -v 2>&1 | sed 's/agrvate //;') + END_VERSIONS """ } diff --git a/modules/agrvate/meta.yml b/modules/agrvate/meta.yml index 97aa5f58..bd27050a 100644 --- a/modules/agrvate/meta.yml +++ b/modules/agrvate/meta.yml @@ -41,6 +41,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/allelecounter/functions.nf b/modules/allelecounter/functions.nf index da9da093..85628ee0 100644 --- a/modules/allelecounter/functions.nf +++ b/modules/allelecounter/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index ad24b3c1..31ef3f79 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process ALLELECOUNTER { output: tuple val(meta), path("*.alleleCount"), emit: allelecount - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process ALLELECOUNTER { -b $bam \\ -o ${prefix}.alleleCount - alleleCounter --version > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(alleleCounter --version) + END_VERSIONS """ } diff --git a/modules/allelecounter/meta.yml b/modules/allelecounter/meta.yml index 28f96836..67b398f3 100644 --- a/modules/allelecounter/meta.yml +++ b/modules/allelecounter/meta.yml @@ -42,7 +42,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - alleleCount: type: file description: Allele count file diff --git a/modules/arriba/functions.nf b/modules/arriba/functions.nf index da9da093..85628ee0 100644 --- a/modules/arriba/functions.nf +++ b/modules/arriba/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if 
(!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/arriba/main.nf b/modules/arriba/main.nf index 739922ef..b94c22d9 100644 --- a/modules/arriba/main.nf +++ b/modules/arriba/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process ARRIBA { output: tuple val(meta), path("*.fusions.tsv") , emit: fusions tuple val(meta), path("*.fusions.discarded.tsv"), emit: fusions_fail - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process ARRIBA { $blacklist \\ $options.args - echo \$(arriba -h | grep 'Version:' 2>&1) | sed 's/Version:\s//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(arriba -h | grep 'Version:' 2>&1 | sed 's/Version:\s//') + END_VERSIONS """ } diff --git a/modules/arriba/meta.yml b/modules/arriba/meta.yml index 370f82ec..ddd2c75b 100644 --- a/modules/arriba/meta.yml +++ b/modules/arriba/meta.yml @@ -40,7 +40,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - fusions: type: file description: File contains fusions which pass all of Arriba's filters. 
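Every module touched from this point on applies the same refactor already visible in the abacas, adapterremoval, agrvate, allelecounter and arriba diffs above: the old *.version.txt output is replaced by a versions.yml channel written with a Bash here-doc, keyed by getProcessName(task.process) (the fully qualified process name after the last ':') together with getSoftwareName(task.process), and saveFiles() now returns null for versions.yml unless the NF_CORE_MODULES_TEST environment variable is set, so the file is only published from the pytest workflow. Below is a minimal sketch of that pattern, using a hypothetical module MYTOOL and a placeholder command 'mytool' that are not part of this patch series.

include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process MYTOOL {
    // publishDir / conda / container directives are elided in this sketch; in the real
    // modules the publishDir saveAs closure routes every output filename through
    // saveFiles(), which now returns null for versions.yml unless NF_CORE_MODULES_TEST
    // is set in the environment.

    input:
    tuple val(meta), path(reads)

    output:
    tuple val(meta), path("*.out"), emit: results
    path "versions.yml"           , emit: version   // was: path "*.version.txt"

    script:
    def prefix = "${meta.id}"
    """
    mytool $options.args $reads > ${prefix}.out

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$(mytool --version 2>&1)
    END_VERSIONS
    """
}

At run time such a process writes a small YAML map, roughly

MYTOOL:
    mytool: 1.2.3

(the version string here is illustrative), which a pipeline can collate across modules from the version output channels; outside the pytest workflow the file itself is never copied to the publish directory.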
diff --git a/modules/artic/guppyplex/functions.nf b/modules/artic/guppyplex/functions.nf index da9da093..85628ee0 100644 --- a/modules/artic/guppyplex/functions.nf +++ b/modules/artic/guppyplex/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/artic/guppyplex/main.nf b/modules/artic/guppyplex/main.nf index 41178298..5f91e9e3 100644 --- a/modules/artic/guppyplex/main.nf +++ b/modules/artic/guppyplex/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process ARTIC_GUPPYPLEX { output: tuple val(meta), path("*.fastq.gz"), emit: fastq - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process ARTIC_GUPPYPLEX { --output ${prefix}.fastq pigz -p $task.cpus *.fastq - echo \$(artic --version 2>&1) | sed 's/^.*artic //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/artic/guppyplex/meta.yml b/modules/artic/guppyplex/meta.yml index 0caaf5d2..45ec7138 100644 --- a/modules/artic/guppyplex/meta.yml +++ b/modules/artic/guppyplex/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/artic/minion/functions.nf b/modules/artic/minion/functions.nf index da9da093..85628ee0 100644 --- a/modules/artic/minion/functions.nf +++ b/modules/artic/minion/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/artic/minion/main.nf b/modules/artic/minion/main.nf index e408551b..2f810ecf 100644 --- a/modules/artic/minion/main.nf +++ b/modules/artic/minion/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -40,7 +40,7 @@ process ARTIC_MINION { tuple val(meta), path("${prefix}.pass.vcf.gz") , emit: vcf tuple val(meta), path("${prefix}.pass.vcf.gz.tbi") , emit: tbi tuple val(meta), path("*.json"), optional:true , emit: json - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -68,6 +68,9 @@ process ARTIC_MINION { $scheme \\ $prefix - echo \$(artic --version 2>&1) | sed 's/^.*artic //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/artic/minion/meta.yml b/modules/artic/minion/meta.yml index 1b6a73cf..77f325e5 100644 --- a/modules/artic/minion/meta.yml +++ b/modules/artic/minion/meta.yml @@ -106,7 +106,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bamaligncleaner/functions.nf b/modules/bamaligncleaner/functions.nf index da9da093..85628ee0 100644 --- a/modules/bamaligncleaner/functions.nf +++ b/modules/bamaligncleaner/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bamaligncleaner/main.nf b/modules/bamaligncleaner/main.nf index 8ce73ee4..7372f274 100644 --- a/modules/bamaligncleaner/main.nf +++ b/modules/bamaligncleaner/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BAMALIGNCLEANER { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process BAMALIGNCLEANER { -o ${prefix}.bam \\ ${bam} - echo \$(bamAlignCleaner --version) | sed 's/.*version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bamAlignCleaner --version | sed 's/.*version //') + END_VERSIONS """ } diff --git a/modules/bamaligncleaner/meta.yml b/modules/bamaligncleaner/meta.yml index 8afdd44b..c236c0ea 100644 --- a/modules/bamaligncleaner/meta.yml +++ b/modules/bamaligncleaner/meta.yml @@ -30,7 +30,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: 
file description: Sorted BAM/CRAM file diff --git a/modules/bandage/image/functions.nf b/modules/bandage/image/functions.nf index da9da093..85628ee0 100644 --- a/modules/bandage/image/functions.nf +++ b/modules/bandage/image/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bandage/image/main.nf b/modules/bandage/image/main.nf index 6afdb60d..c788e2e1 100644 --- a/modules/bandage/image/main.nf +++ b/modules/bandage/image/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BANDAGE_IMAGE { output: tuple val(meta), path('*.png'), emit: png tuple val(meta), path('*.svg'), emit: svg - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process BANDAGE_IMAGE { Bandage image $gfa ${prefix}.png $options.args Bandage image $gfa ${prefix}.svg $options.args - echo \$(Bandage --version 2>&1) | sed 's/^.*Version: //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(Bandage --version 2>&1 | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bandage/image/meta.yml b/modules/bandage/image/meta.yml index 26c23a07..f655cae4 100644 --- a/modules/bandage/image/meta.yml +++ b/modules/bandage/image/meta.yml @@ -38,6 +38,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/bbmap/align/functions.nf b/modules/bbmap/align/functions.nf index da9da093..85628ee0 100644 --- a/modules/bbmap/align/functions.nf +++ b/modules/bbmap/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index eca45ddb..8235e78d 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BBMAP_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -54,6 +54,9 @@ process BBMAP_ALIGN { threads=$task.cpus \\ -Xmx${task.memory.toGiga()}g - echo \$(bbversion.sh) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bbversion.sh) + END_VERSIONS """ } diff --git a/modules/bbmap/align/meta.yml b/modules/bbmap/align/meta.yml index b008ea0f..bb52f06e 100644 --- a/modules/bbmap/align/meta.yml +++ b/modules/bbmap/align/meta.yml @@ -42,7 +42,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: BAM file diff --git a/modules/bbmap/bbduk/functions.nf b/modules/bbmap/bbduk/functions.nf index da9da093..85628ee0 100644 --- a/modules/bbmap/bbduk/functions.nf +++ b/modules/bbmap/bbduk/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - 
def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bbmap/bbduk/main.nf b/modules/bbmap/bbduk/main.nf index 797dc8b5..4f1540dc 100644 --- a/modules/bbmap/bbduk/main.nf +++ b/modules/bbmap/bbduk/main.nf @@ -1,4 +1,4 @@ -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BBMAP_BBDUK { output: tuple val(meta), path('*.fastq.gz'), emit: reads tuple val(meta), path('*.log') , emit: log - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process BBMAP_BBDUK { $options.args \\ $contaminants_fa \\ &> ${prefix}.bbduk.log - echo \$(bbversion.sh) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bbversion.sh) + END_VERSIONS """ } diff --git a/modules/bbmap/bbduk/meta.yml b/modules/bbmap/bbduk/meta.yml index ee2eea2c..a1ab789c 100644 --- a/modules/bbmap/bbduk/meta.yml +++ b/modules/bbmap/bbduk/meta.yml @@ -42,7 +42,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - log: type: file description: Bbduk log file diff --git a/modules/bbmap/index/functions.nf b/modules/bbmap/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/bbmap/index/functions.nf +++ b/modules/bbmap/index/functions.nf @@ -9,6 +9,13 @@ def 
getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bbmap/index/main.nf b/modules/bbmap/index/main.nf index 0e15b13f..6f957d03 100644 --- a/modules/bbmap/index/main.nf +++ b/modules/bbmap/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BBMAP_INDEX { output: path 'ref' , emit: index - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process BBMAP_INDEX { threads=$task.cpus \\ -Xmx${task.memory.toGiga()}g - echo \$(bbversion.sh) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bbversion.sh) + END_VERSIONS """ } diff --git a/modules/bbmap/index/meta.yml b/modules/bbmap/index/meta.yml index 
a51a44fd..1df990b2 100644 --- a/modules/bbmap/index/meta.yml +++ b/modules/bbmap/index/meta.yml @@ -23,7 +23,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - db: type: directory description: Directory with index files diff --git a/modules/bcftools/concat/functions.nf b/modules/bcftools/concat/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/concat/functions.nf +++ b/modules/bcftools/concat/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/concat/main.nf b/modules/bcftools/concat/main.nf index 0266f4f0..fab0e83d 100644 --- a/modules/bcftools/concat/main.nf +++ b/modules/bcftools/concat/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BCFTOOLS_CONCAT { output: tuple val(meta), path("*.gz"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process BCFTOOLS_CONCAT { --threads $task.cpus \\ ${vcfs} - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/concat/meta.yml b/modules/bcftools/concat/meta.yml index 566e6dba..81701288 100644 --- a/modules/bcftools/concat/meta.yml +++ b/modules/bcftools/concat/meta.yml @@ -37,6 +37,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/bcftools/consensus/functions.nf b/modules/bcftools/consensus/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/consensus/functions.nf +++ b/modules/bcftools/consensus/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/consensus/main.nf b/modules/bcftools/consensus/main.nf index 0403f050..29758a4b 100644 --- a/modules/bcftools/consensus/main.nf +++ b/modules/bcftools/consensus/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BCFTOOLS_CONSENSUS { output: tuple val(meta), path('*.fa'), emit: fasta - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process BCFTOOLS_CONSENSUS { header=\$(head -n 1 ${prefix}.fa | sed 's/>//g') sed -i 's/\${header}/${meta.id}/g' ${prefix}.fa - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/consensus/meta.yml b/modules/bcftools/consensus/meta.yml index ef14479d..4241e441 100644 --- a/modules/bcftools/consensus/meta.yml +++ b/modules/bcftools/consensus/meta.yml @@ -42,7 +42,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bcftools/filter/functions.nf b/modules/bcftools/filter/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/filter/functions.nf +++ b/modules/bcftools/filter/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/filter/main.nf b/modules/bcftools/filter/main.nf index fbdac0de..37b7e28b 100644 --- a/modules/bcftools/filter/main.nf +++ b/modules/bcftools/filter/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BCFTOOLS_FILTER { output: tuple val(meta), path("*.gz"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process BCFTOOLS_FILTER { $options.args \\ $vcf - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/filter/meta.yml b/modules/bcftools/filter/meta.yml index fe9a57e6..6842b1f8 100644 --- a/modules/bcftools/filter/meta.yml +++ b/modules/bcftools/filter/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software 
version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bcftools/isec/functions.nf b/modules/bcftools/isec/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/isec/functions.nf +++ b/modules/bcftools/isec/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/isec/main.nf b/modules/bcftools/isec/main.nf index 28c6103e..f700f35c 100644 --- a/modules/bcftools/isec/main.nf +++ b/modules/bcftools/isec/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BCFTOOLS_ISEC { output: tuple val(meta), path("${prefix}"), emit: results - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process BCFTOOLS_ISEC { $options.args \\ -p $prefix \\ *.vcf.gz - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/isec/meta.yml b/modules/bcftools/isec/meta.yml index fb8f4b4e..7a75a3af 100644 --- a/modules/bcftools/isec/meta.yml +++ b/modules/bcftools/isec/meta.yml @@ -42,7 +42,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bcftools/merge/functions.nf b/modules/bcftools/merge/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/merge/functions.nf +++ b/modules/bcftools/merge/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/merge/main.nf b/modules/bcftools/merge/main.nf index 66c52281..7d8ab670 100644 --- a/modules/bcftools/merge/main.nf +++ b/modules/bcftools/merge/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BCFTOOLS_MERGE { output: tuple val(meta), path("*.gz"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process BCFTOOLS_MERGE { --output ${prefix}.vcf.gz \\ $options.args \\ *.vcf.gz - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/merge/meta.yml b/modules/bcftools/merge/meta.yml index fad7966e..262d883a 100644 --- a/modules/bcftools/merge/meta.yml +++ b/modules/bcftools/merge/meta.yml @@ -40,7 +40,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bcftools/mpileup/functions.nf b/modules/bcftools/mpileup/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/mpileup/functions.nf +++ b/modules/bcftools/mpileup/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ 
def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/mpileup/main.nf b/modules/bcftools/mpileup/main.nf index de9b951f..1f6eecaa 100644 --- a/modules/bcftools/mpileup/main.nf +++ b/modules/bcftools/mpileup/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process BCFTOOLS_MPILEUP { tuple val(meta), path("*.gz") , emit: vcf tuple val(meta), path("*.tbi") , emit: tbi tuple val(meta), path("*stats.txt"), emit: stats - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process BCFTOOLS_MPILEUP { | bcftools view --output-file ${prefix}.vcf.gz --output-type z $options.args3 tabix -p vcf -f ${prefix}.vcf.gz bcftools stats ${prefix}.vcf.gz > ${prefix}.bcftools_stats.txt - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/mpileup/meta.yml b/modules/bcftools/mpileup/meta.yml index a15aea14..44f2b81e 100644 --- 
a/modules/bcftools/mpileup/meta.yml +++ b/modules/bcftools/mpileup/meta.yml @@ -46,7 +46,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bcftools/norm/functions.nf b/modules/bcftools/norm/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/norm/functions.nf +++ b/modules/bcftools/norm/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf index 5d8a7c3c..454fc1d2 100644 --- a/modules/bcftools/norm/main.nf +++ b/modules/bcftools/norm/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BCFTOOLS_NORM { output: tuple val(meta), path("*.gz") , emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process BCFTOOLS_NORM { --threads $task.cpus \\ ${vcf} - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/norm/meta.yml b/modules/bcftools/norm/meta.yml index abeb8904..f2534452 100644 --- a/modules/bcftools/norm/meta.yml +++ b/modules/bcftools/norm/meta.yml @@ -40,6 +40,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/bcftools/query/functions.nf b/modules/bcftools/query/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/query/functions.nf +++ b/modules/bcftools/query/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/query/main.nf b/modules/bcftools/query/main.nf index 26eeca63..4815ae90 100644 --- a/modules/bcftools/query/main.nf +++ b/modules/bcftools/query/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process BCFTOOLS_QUERY { output: tuple val(meta), path("*.gz") , emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -44,6 +44,9 @@ process BCFTOOLS_QUERY { $options.args \\ ${vcf} - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/query/meta.yml b/modules/bcftools/query/meta.yml index 7806c7db..57570c64 100644 --- a/modules/bcftools/query/meta.yml +++ b/modules/bcftools/query/meta.yml @@ -56,6 +56,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/bcftools/reheader/functions.nf b/modules/bcftools/reheader/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/reheader/functions.nf +++ b/modules/bcftools/reheader/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // 
Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/reheader/main.nf b/modules/bcftools/reheader/main.nf index 53b00411..a949b6e9 100644 --- a/modules/bcftools/reheader/main.nf +++ b/modules/bcftools/reheader/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process BCFTOOLS_REHEADER { output: tuple val(meta), path("*.vcf.gz"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process BCFTOOLS_REHEADER { -o ${prefix}.vcf.gz \\ $vcf - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/reheader/meta.yml b/modules/bcftools/reheader/meta.yml index 1b9c1a8b..823e3279 100644 --- a/modules/bcftools/reheader/meta.yml +++ b/modules/bcftools/reheader/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - vcf: type: file description: 
VCF with updated header diff --git a/modules/bcftools/stats/functions.nf b/modules/bcftools/stats/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/stats/functions.nf +++ b/modules/bcftools/stats/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/stats/main.nf b/modules/bcftools/stats/main.nf index 90be5d2b..f5b1f6b1 100644 --- a/modules/bcftools/stats/main.nf +++ b/modules/bcftools/stats/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process BCFTOOLS_STATS { output: tuple val(meta), path("*stats.txt"), emit: stats - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ bcftools stats $options.args $vcf > ${prefix}.bcftools_stats.txt - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/stats/meta.yml b/modules/bcftools/stats/meta.yml index 6b70f83a..33675cb9 100644 --- a/modules/bcftools/stats/meta.yml +++ b/modules/bcftools/stats/meta.yml @@ -35,7 +35,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bcftools/view/functions.nf b/modules/bcftools/view/functions.nf index da9da093..85628ee0 100644 --- a/modules/bcftools/view/functions.nf +++ b/modules/bcftools/view/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf index 92f7036b..5a944e89 100644 --- a/modules/bcftools/view/main.nf +++ b/modules/bcftools/view/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process BCFTOOLS_VIEW { output: tuple val(meta), path("*.gz") , emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -46,6 +46,9 @@ process BCFTOOLS_VIEW { --threads $task.cpus \\ ${vcf} - echo \$(bcftools --version 2>&1) | sed 's/^.*bcftools //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bcftools/view/meta.yml b/modules/bcftools/view/meta.yml index 947e2562..e37e41b5 100644 --- a/modules/bcftools/view/meta.yml +++ b/modules/bcftools/view/meta.yml @@ -57,6 +57,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/bedtools/bamtobed/functions.nf b/modules/bedtools/bamtobed/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/bamtobed/functions.nf +++ b/modules/bedtools/bamtobed/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/bamtobed/main.nf b/modules/bedtools/bamtobed/main.nf index 22f5713c..19986371 100644 --- a/modules/bedtools/bamtobed/main.nf +++ b/modules/bedtools/bamtobed/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BEDTOOLS_BAMTOBED { output: tuple val(meta), path("*.bed"), emit: bed - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process BEDTOOLS_BAMTOBED { -i $bam \\ | bedtools sort > ${prefix}.bed - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/bamtobed/meta.yml b/modules/bedtools/bamtobed/meta.yml index 3594d2d4..5d7889ea 100644 --- a/modules/bedtools/bamtobed/meta.yml +++ b/modules/bedtools/bamtobed/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@yuukiiwa" - "@drpatelh" diff --git a/modules/bedtools/complement/functions.nf b/modules/bedtools/complement/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/complement/functions.nf +++ b/modules/bedtools/complement/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/complement/main.nf b/modules/bedtools/complement/main.nf index 3c39f289..5b3bbea9 100644 --- a/modules/bedtools/complement/main.nf +++ b/modules/bedtools/complement/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BEDTOOLS_COMPLEMENT { output: tuple val(meta), path('*.bed'), emit: bed - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process BEDTOOLS_COMPLEMENT { $options.args \\ > ${prefix}.bed - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/complement/meta.yml b/modules/bedtools/complement/meta.yml index 60d97263..183c9e8f 100644 --- a/modules/bedtools/complement/meta.yml +++ b/modules/bedtools/complement/meta.yml @@ -35,7 +35,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@Emiller88" - 
"@sruthipsuresh" diff --git a/modules/bedtools/genomecov/functions.nf b/modules/bedtools/genomecov/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/genomecov/functions.nf +++ b/modules/bedtools/genomecov/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/genomecov/main.nf b/modules/bedtools/genomecov/main.nf index f9b87464..b5deedf1 100644 --- a/modules/bedtools/genomecov/main.nf +++ b/modules/bedtools/genomecov/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process BEDTOOLS_GENOMECOV { output: tuple val(meta), path("*.${extension}"), emit: genomecov - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,7 +38,10 @@ process BEDTOOLS_GENOMECOV { $options.args \\ > ${prefix}.${extension} - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } else { """ @@ -49,7 +52,10 @@ process BEDTOOLS_GENOMECOV { $options.args \\ > ${prefix}.${extension} - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } } diff --git a/modules/bedtools/genomecov/meta.yml b/modules/bedtools/genomecov/meta.yml index f629665c..7f28c185 100644 --- a/modules/bedtools/genomecov/meta.yml +++ b/modules/bedtools/genomecov/meta.yml @@ -38,7 +38,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@Emiller88" - "@sruthipsuresh" diff --git a/modules/bedtools/getfasta/functions.nf b/modules/bedtools/getfasta/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/getfasta/functions.nf +++ b/modules/bedtools/getfasta/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/getfasta/main.nf b/modules/bedtools/getfasta/main.nf index 374a310b..72e457dc 100644 --- a/modules/bedtools/getfasta/main.nf +++ b/modules/bedtools/getfasta/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BEDTOOLS_GETFASTA { output: path "*.fa" , emit: fasta - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process BEDTOOLS_GETFASTA { -bed $bed \\ -fo ${prefix}.fa - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/getfasta/meta.yml b/modules/bedtools/getfasta/meta.yml index 1ca63bdc..1ddd4bbb 100644 --- a/modules/bedtools/getfasta/meta.yml +++ b/modules/bedtools/getfasta/meta.yml @@ -27,7 +27,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bedtools/intersect/functions.nf b/modules/bedtools/intersect/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/intersect/functions.nf +++ b/modules/bedtools/intersect/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module 
from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/intersect/main.nf b/modules/bedtools/intersect/main.nf index 4519783a..b75bd116 100644 --- a/modules/bedtools/intersect/main.nf +++ b/modules/bedtools/intersect/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BEDTOOLS_INTERSECT { output: tuple val(meta), path("*.${extension}"), emit: intersect - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process BEDTOOLS_INTERSECT { $options.args \\ > ${prefix}.${extension} - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/intersect/meta.yml b/modules/bedtools/intersect/meta.yml index 
e944e355..2c229884 100644 --- a/modules/bedtools/intersect/meta.yml +++ b/modules/bedtools/intersect/meta.yml @@ -38,7 +38,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@Emiller88" - "@sruthipsuresh" diff --git a/modules/bedtools/makewindows/functions.nf b/modules/bedtools/makewindows/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/makewindows/functions.nf +++ b/modules/bedtools/makewindows/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/makewindows/main.nf b/modules/bedtools/makewindows/main.nf index d3e82f86..5e93f0ae 100644 --- a/modules/bedtools/makewindows/main.nf +++ b/modules/bedtools/makewindows/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BEDTOOLS_MAKEWINDOWS { output: tuple val(meta), path("*.tab"), emit: tab - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process BEDTOOLS_MAKEWINDOWS { $options.args \\ > ${prefix}.tab - echo \$(bedtools --version) | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/makewindows/meta.yml b/modules/bedtools/makewindows/meta.yml index 3c1378b8..dcddbc75 100644 --- a/modules/bedtools/makewindows/meta.yml +++ b/modules/bedtools/makewindows/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - tab: type: file description: Windows TAB file (BED or BED-like format) diff --git a/modules/bedtools/maskfasta/functions.nf b/modules/bedtools/maskfasta/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/maskfasta/functions.nf +++ b/modules/bedtools/maskfasta/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/maskfasta/main.nf b/modules/bedtools/maskfasta/main.nf index 02110149..67097f3f 100644 --- a/modules/bedtools/maskfasta/main.nf +++ b/modules/bedtools/maskfasta/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BEDTOOLS_MASKFASTA { output: tuple val(meta), path("*.fa"), emit: fasta - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process BEDTOOLS_MASKFASTA { -fi $fasta \\ -bed $bed \\ -fo ${prefix}.fa - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/maskfasta/meta.yml b/modules/bedtools/maskfasta/meta.yml index b6e494e6..0474118b 100644 --- a/modules/bedtools/maskfasta/meta.yml +++ b/modules/bedtools/maskfasta/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bedtools/merge/functions.nf b/modules/bedtools/merge/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/merge/functions.nf +++ b/modules/bedtools/merge/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/merge/main.nf b/modules/bedtools/merge/main.nf index 4ac7d1a5..ba8348af 100644 --- a/modules/bedtools/merge/main.nf +++ b/modules/bedtools/merge/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BEDTOOLS_MERGE { output: tuple val(meta), path('*.bed'), emit: bed - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process BEDTOOLS_MERGE { $options.args \\ > ${prefix}.bed - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/merge/meta.yml b/modules/bedtools/merge/meta.yml index f75bea67..0618c0ff 100644 --- a/modules/bedtools/merge/meta.yml +++ b/modules/bedtools/merge/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@Emiller88" - "@sruthipsuresh" diff --git 
a/modules/bedtools/slop/functions.nf b/modules/bedtools/slop/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/slop/functions.nf +++ b/modules/bedtools/slop/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/slop/main.nf b/modules/bedtools/slop/main.nf index 33dc1930..6644b8db 100644 --- a/modules/bedtools/slop/main.nf +++ b/modules/bedtools/slop/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BEDTOOLS_SLOP { output: tuple val(meta), path("*.bed"), emit: bed - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,7 +37,10 @@ process BEDTOOLS_SLOP { $options.args \\ > ${prefix}.bed - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/slop/meta.yml b/modules/bedtools/slop/meta.yml index 3d4e2091..bdcdc1d2 100644 --- a/modules/bedtools/slop/meta.yml +++ b/modules/bedtools/slop/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@Emiller88" - "@sruthipsuresh" diff --git a/modules/bedtools/sort/functions.nf b/modules/bedtools/sort/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/sort/functions.nf +++ b/modules/bedtools/sort/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/sort/main.nf b/modules/bedtools/sort/main.nf index 908514d9..acc4a593 100644 --- a/modules/bedtools/sort/main.nf +++ b/modules/bedtools/sort/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BEDTOOLS_SORT { output: tuple val(meta), path('*.bed'), emit: bed - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process BEDTOOLS_SORT { $options.args \\ > ${prefix}.bed - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/sort/meta.yml b/modules/bedtools/sort/meta.yml index 9962a4f2..d09886a5 100644 --- a/modules/bedtools/sort/meta.yml +++ b/modules/bedtools/sort/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@Emiller88" - "@sruthipsuresh" diff --git a/modules/bedtools/subtract/functions.nf b/modules/bedtools/subtract/functions.nf index da9da093..85628ee0 100644 --- a/modules/bedtools/subtract/functions.nf +++ b/modules/bedtools/subtract/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module 
results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bedtools/subtract/main.nf b/modules/bedtools/subtract/main.nf index 5780cd65..a8e2ad02 100644 --- a/modules/bedtools/subtract/main.nf +++ b/modules/bedtools/subtract/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BEDTOOLS_SUBTRACT { output: tuple val(meta), path("*.bed"), emit: bed - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process BEDTOOLS_SUBTRACT { $options.args \\ > ${prefix}.bed - bedtools --version | sed -e "s/bedtools v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + END_VERSIONS """ } diff --git a/modules/bedtools/subtract/meta.yml b/modules/bedtools/subtract/meta.yml index a1a1e087..8c99b80a 100644 --- a/modules/bedtools/subtract/meta.yml +++ b/modules/bedtools/subtract/meta.yml @@ -39,7 +39,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@sidorov-si" diff --git a/modules/bismark/align/functions.nf b/modules/bismark/align/functions.nf 
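The functions.nf hunks above (and the identical ones that follow for the bismark, blast and bowtie modules) all add the same helper pair: getProcessName keeps the last colon-separated token of task.process, while getSoftwareName additionally keeps only the part before the first underscore and lower-cases it. A minimal, standalone Groovy sketch of that behaviour, using a hypothetical fully-qualified process name (the sample value is illustrative, not taken from this patch):

    // Copies of the two helpers added to each functions.nf above.
    // The sample process name below is hypothetical.
    def getSoftwareName(task_process) {
        return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
    }
    def getProcessName(task_process) {
        return task_process.tokenize(':')[-1]
    }

    def example = 'NFCORE_TEST:TEST:BEDTOOLS_SORT'
    assert getProcessName(example)  == 'BEDTOOLS_SORT'   // top-level key written to versions.yml
    assert getSoftwareName(example) == 'bedtools'        // per-tool key written to versions.yml

These two values are exactly what the new cat <<-END_VERSIONS heredocs in each main.nf interpolate into versions.yml alongside the tool's reported version string.
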
index da9da093..85628ee0 100644 --- a/modules/bismark/align/functions.nf +++ b/modules/bismark/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bismark/align/main.nf b/modules/bismark/align/main.nf index 02f439f6..00510272 100644 --- a/modules/bismark/align/main.nf +++ b/modules/bismark/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process BISMARK_ALIGN { tuple val(meta), path("*bam") , emit: bam tuple val(meta), path("*report.txt"), emit: report tuple val(meta), path("*fq.gz") , optional:true, emit: unmapped - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process BISMARK_ALIGN { --genome $index \\ --bam - echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + END_VERSIONS """ } diff --git a/modules/bismark/align/meta.yml b/modules/bismark/align/meta.yml index ed3b1999..d9bacf04 100644 --- a/modules/bismark/align/meta.yml +++ b/modules/bismark/align/meta.yml @@ -53,6 +53,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/deduplicate/functions.nf b/modules/bismark/deduplicate/functions.nf index da9da093..85628ee0 100644 --- a/modules/bismark/deduplicate/functions.nf +++ b/modules/bismark/deduplicate/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bismark/deduplicate/main.nf b/modules/bismark/deduplicate/main.nf index 6d7010af..6e3219f0 100644 --- a/modules/bismark/deduplicate/main.nf +++ b/modules/bismark/deduplicate/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BISMARK_DEDUPLICATE { output: tuple val(meta), path("*.deduplicated.bam") , emit: bam tuple val(meta), path("*.deduplication_report.txt"), emit: report - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process BISMARK_DEDUPLICATE { $seqtype \\ --bam $bam - echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + END_VERSIONS """ } diff --git a/modules/bismark/deduplicate/meta.yml b/modules/bismark/deduplicate/meta.yml index 117eed49..11d8797b 100644 --- a/modules/bismark/deduplicate/meta.yml +++ b/modules/bismark/deduplicate/meta.yml @@ -46,6 +46,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/genomepreparation/functions.nf b/modules/bismark/genomepreparation/functions.nf index da9da093..85628ee0 100644 --- a/modules/bismark/genomepreparation/functions.nf +++ b/modules/bismark/genomepreparation/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def 
getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bismark/genomepreparation/main.nf b/modules/bismark/genomepreparation/main.nf index 3d48d955..029804d9 100644 --- a/modules/bismark/genomepreparation/main.nf +++ b/modules/bismark/genomepreparation/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BISMARK_GENOMEPREPARATION { output: path "BismarkIndex" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -32,6 +32,9 @@ process BISMARK_GENOMEPREPARATION { $options.args \\ BismarkIndex - echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + END_VERSIONS """ } diff --git a/modules/bismark/genomepreparation/meta.yml 
b/modules/bismark/genomepreparation/meta.yml index a31add8a..6d267343 100644 --- a/modules/bismark/genomepreparation/meta.yml +++ b/modules/bismark/genomepreparation/meta.yml @@ -31,6 +31,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/methylationextractor/functions.nf b/modules/bismark/methylationextractor/functions.nf index da9da093..85628ee0 100644 --- a/modules/bismark/methylationextractor/functions.nf +++ b/modules/bismark/methylationextractor/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bismark/methylationextractor/main.nf b/modules/bismark/methylationextractor/main.nf index cdc4999c..5968d38f 100644 --- a/modules/bismark/methylationextractor/main.nf +++ b/modules/bismark/methylationextractor/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process BISMARK_METHYLATIONEXTRACTOR { tuple val(meta), path("*.cov.gz") , emit: coverage tuple val(meta), path("*_splitting_report.txt"), emit: report tuple val(meta), path("*.M-bias.txt") , emit: mbias - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def seqtype = meta.single_end ? '-s' : '-p' @@ -43,6 +43,9 @@ process BISMARK_METHYLATIONEXTRACTOR { $options.args \\ $bam - echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + END_VERSIONS """ } diff --git a/modules/bismark/methylationextractor/meta.yml b/modules/bismark/methylationextractor/meta.yml index a201b586..2ae7cf64 100644 --- a/modules/bismark/methylationextractor/meta.yml +++ b/modules/bismark/methylationextractor/meta.yml @@ -61,6 +61,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/report/functions.nf b/modules/bismark/report/functions.nf index da9da093..85628ee0 100644 --- a/modules/bismark/report/functions.nf +++ b/modules/bismark/report/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bismark/report/main.nf b/modules/bismark/report/main.nf index 180efc7e..8148b061 100644 --- a/modules/bismark/report/main.nf +++ b/modules/bismark/report/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process BISMARK_REPORT { output: tuple val(meta), path("*report.{html,txt}"), emit: report - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ bismark2report $options.args - echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + END_VERSIONS """ } diff --git a/modules/bismark/report/meta.yml b/modules/bismark/report/meta.yml index 3e8da1d0..57b8c746 100644 --- a/modules/bismark/report/meta.yml +++ b/modules/bismark/report/meta.yml @@ -54,6 +54,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/summary/functions.nf b/modules/bismark/summary/functions.nf index da9da093..85628ee0 100644 --- a/modules/bismark/summary/functions.nf +++ b/modules/bismark/summary/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process 
name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bismark/summary/main.nf b/modules/bismark/summary/main.nf index f5c03529..ae8ac27c 100644 --- a/modules/bismark/summary/main.nf +++ b/modules/bismark/summary/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,13 +26,16 @@ process BISMARK_SUMMARY { output: path "*report.{html,txt}", emit: summary - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ bismark2summary - echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + END_VERSIONS """ } diff --git a/modules/bismark/summary/meta.yml b/modules/bismark/summary/meta.yml index a88bf8f6..37d8951b 100644 --- a/modules/bismark/summary/meta.yml +++ 
b/modules/bismark/summary/meta.yml @@ -48,6 +48,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/blast/blastn/functions.nf b/modules/blast/blastn/functions.nf index da9da093..85628ee0 100644 --- a/modules/blast/blastn/functions.nf +++ b/modules/blast/blastn/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/blast/blastn/main.nf b/modules/blast/blastn/main.nf index 87e012e2..1146ede4 100644 --- a/modules/blast/blastn/main.nf +++ b/modules/blast/blastn/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BLAST_BLASTN { output: tuple val(meta), path('*.blastn.txt'), emit: txt - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process BLAST_BLASTN { -query $fasta \\ $options.args \\ -out ${prefix}.blastn.txt - echo \$(blastn -version 2>&1) | sed 's/^.*blastn: //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/blast/blastn/meta.yml b/modules/blast/blastn/meta.yml index d04889a8..b4a832ea 100644 --- a/modules/blast/blastn/meta.yml +++ b/modules/blast/blastn/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/blast/makeblastdb/functions.nf b/modules/blast/makeblastdb/functions.nf index da9da093..85628ee0 100644 --- a/modules/blast/makeblastdb/functions.nf +++ b/modules/blast/makeblastdb/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/blast/makeblastdb/main.nf b/modules/blast/makeblastdb/main.nf index c938e8f6..9ee02108 100644 --- a/modules/blast/makeblastdb/main.nf +++ b/modules/blast/makeblastdb/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process BLAST_MAKEBLASTDB { output: path 'blast_db' , emit: db - path '*.version.txt', emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process BLAST_MAKEBLASTDB { $options.args mkdir blast_db mv ${fasta}* blast_db - echo \$(blastn -version 2>&1) | sed 's/^.*blastn: //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/blast/makeblastdb/meta.yml b/modules/blast/makeblastdb/meta.yml index 0ea4903f..9a5957db 100644 --- a/modules/blast/makeblastdb/meta.yml +++ b/modules/blast/makeblastdb/meta.yml @@ -24,7 +24,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bowtie/align/functions.nf b/modules/bowtie/align/functions.nf index da9da093..85628ee0 100644 --- a/modules/bowtie/align/functions.nf +++ b/modules/bowtie/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 9cafbfd2..3357a592 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process BOWTIE_ALIGN { output: tuple val(meta), path('*.bam'), emit: bam tuple val(meta), path('*.out'), emit: log - path '*.version.txt' , emit: version + path "versions.yml" , emit: version tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: @@ -55,6 +55,9 @@ process BOWTIE_ALIGN { gzip ${prefix}.unmapped_2.fastq fi - echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bowtie --version 2>&1 | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bowtie/align/meta.yml b/modules/bowtie/align/meta.yml index bea8b4dd..e5ada585 100644 --- a/modules/bowtie/align/meta.yml +++ b/modules/bowtie/align/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" 
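Every saveFiles above now begins with the same early return: versions.yml is only published when the NF_CORE_MODULES_TEST environment variable is set, as it is in the pytest-workflow test runs, while in a normal pipeline run the file simply stays in the task work directory. A minimal Groovy sketch of just that decision, using a hypothetical wrapper name (shouldPublish is not part of the patch):

    // Illustrative reduction of the guard added at the top of saveFiles().
    def shouldPublish(String filename) {
        if (filename.equals('versions.yml') && !System.getenv('NF_CORE_MODULES_TEST')) {
            return false    // outside the test environment the versions file is not published
        }
        return true         // every other filename falls through to the normal publish logic
    }

    assert shouldPublish('sample.sorted.bed')   // regular module outputs are unaffected
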
+ pattern: "versions.yml" - fastq: type: file description: Unaligned FastQ files diff --git a/modules/bowtie/build/functions.nf b/modules/bowtie/build/functions.nf index da9da093..85628ee0 100644 --- a/modules/bowtie/build/functions.nf +++ b/modules/bowtie/build/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bowtie/build/main.nf b/modules/bowtie/build/main.nf index 3a6071a9..382e6717 100644 --- a/modules/bowtie/build/main.nf +++ b/modules/bowtie/build/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process BOWTIE_BUILD { output: path 'bowtie' , emit: index - path '*.version.txt', emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ mkdir bowtie bowtie-build --threads $task.cpus $fasta bowtie/${fasta.baseName} - echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bowtie --version 2>&1 | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bowtie/build/meta.yml b/modules/bowtie/build/meta.yml index a2da42c6..e97068f6 100644 --- a/modules/bowtie/build/meta.yml +++ b/modules/bowtie/build/meta.yml @@ -25,7 +25,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@kevinmenden" - "@drpatelh" diff --git a/modules/bowtie2/align/functions.nf b/modules/bowtie2/align/functions.nf index da9da093..85628ee0 100644 --- a/modules/bowtie2/align/functions.nf +++ b/modules/bowtie2/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index d43d479d..e1657a8f 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process BOWTIE2_ALIGN { output: tuple val(meta), path('*.bam'), emit: bam tuple val(meta), path('*.log'), emit: log - path '*.version.txt' , emit: version + path "versions.yml" , emit: version tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: @@ -45,7 +45,10 @@ process BOWTIE2_ALIGN { 2> ${prefix}.bowtie2.log \\ | samtools view -@ ${split_cpus} $options.args2 -bhS -o ${prefix}.bam - - echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bowtie2 --version 2>&1 | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + END_VERSIONS """ } else { def unaligned = params.save_unaligned ? 
"--un-conc-gz ${prefix}.unmapped.fastq.gz" : '' @@ -67,7 +70,10 @@ process BOWTIE2_ALIGN { if [ -f ${prefix}.unmapped.fastq.2.gz ]; then mv ${prefix}.unmapped.fastq.2.gz ${prefix}.unmapped_2.fastq.gz fi - echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bowtie2 --version 2>&1 | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + END_VERSIONS """ } } diff --git a/modules/bowtie2/align/meta.yml b/modules/bowtie2/align/meta.yml index 9d9cd004..cba6eacf 100644 --- a/modules/bowtie2/align/meta.yml +++ b/modules/bowtie2/align/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - fastq: type: file description: Unaligned FastQ files diff --git a/modules/bowtie2/build/functions.nf b/modules/bowtie2/build/functions.nf index da9da093..85628ee0 100644 --- a/modules/bowtie2/build/functions.nf +++ b/modules/bowtie2/build/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bowtie2/build/main.nf b/modules/bowtie2/build/main.nf index 442fed18..04880aeb 100644 --- a/modules/bowtie2/build/main.nf +++ b/modules/bowtie2/build/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process BOWTIE2_BUILD { output: path 'bowtie2' , emit: index - path '*.version.txt', emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ mkdir bowtie2 bowtie2-build $options.args --threads $task.cpus $fasta bowtie2/${fasta.baseName} - echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bowtie2 --version 2>&1 | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/bowtie2/build/meta.yml b/modules/bowtie2/build/meta.yml index 0a4cd3de..70045f3c 100644 --- a/modules/bowtie2/build/meta.yml +++ b/modules/bowtie2/build/meta.yml @@ -26,7 +26,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/bwa/aln/functions.nf b/modules/bwa/aln/functions.nf index da9da093..85628ee0 100644 --- a/modules/bwa/aln/functions.nf +++ b/modules/bwa/aln/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index 59f1396c..8728884c 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BWA_ALN { output: tuple val(meta), path("*.sai"), emit: sai - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,7 +41,10 @@ process BWA_ALN { \$INDEX \\ ${reads} - echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + END_VERSIONS """ } else { """ @@ -61,7 +64,10 @@ process BWA_ALN { \$INDEX \\ ${reads[1]} - echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + END_VERSIONS """ } } diff --git a/modules/bwa/aln/meta.yml b/modules/bwa/aln/meta.yml index 4f81588d..b3797eac 100644 --- a/modules/bwa/aln/meta.yml +++ b/modules/bwa/aln/meta.yml @@ -44,7 +44,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - sai: type: file description: Single or paired SA coordinate files diff --git a/modules/bwa/index/functions.nf b/modules/bwa/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/bwa/index/functions.nf +++ b/modules/bwa/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// 
+def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwa/index/main.nf b/modules/bwa/index/main.nf index aabd187c..9b64bd37 100644 --- a/modules/bwa/index/main.nf +++ b/modules/bwa/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process BWA_INDEX { output: path "bwa" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ mkdir bwa bwa index $options.args $fasta -p bwa/${fasta.baseName} - echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + END_VERSIONS """ } diff --git a/modules/bwa/index/meta.yml b/modules/bwa/index/meta.yml index 181204c3..43ffd73d 100644 --- a/modules/bwa/index/meta.yml +++ b/modules/bwa/index/meta.yml @@ -25,7 +25,7 @@ output: - version: type: file 
description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@maxulysse" diff --git a/modules/bwa/mem/functions.nf b/modules/bwa/mem/functions.nf index da9da093..85628ee0 100644 --- a/modules/bwa/mem/functions.nf +++ b/modules/bwa/mem/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index 5e139d5a..b9096cb8 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BWA_MEM { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def split_cpus = Math.floor(task.cpus/2) @@ -42,6 +42,9 @@ process BWA_MEM { $reads \\ | samtools view $options.args2 -@ ${split_cpus} -bhS -o ${prefix}.bam - - echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + END_VERSIONS """ } diff --git a/modules/bwa/mem/meta.yml b/modules/bwa/mem/meta.yml index 693c5450..618f20d5 100644 --- a/modules/bwa/mem/meta.yml +++ b/modules/bwa/mem/meta.yml @@ -39,7 +39,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@jeremy1805" diff --git a/modules/bwa/sampe/functions.nf b/modules/bwa/sampe/functions.nf index da9da093..85628ee0 100644 --- a/modules/bwa/sampe/functions.nf +++ b/modules/bwa/sampe/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index 7a724908..cb3493c8 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BWA_SAMPE { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,6 +41,9 @@ process BWA_SAMPE { $sai \\ $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam - echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + END_VERSIONS """ } diff --git a/modules/bwa/sampe/meta.yml b/modules/bwa/sampe/meta.yml index 6dc1bcc5..aeb592f7 100644 --- a/modules/bwa/sampe/meta.yml +++ b/modules/bwa/sampe/meta.yml @@ -48,7 +48,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: BAM file diff --git a/modules/bwa/samse/functions.nf b/modules/bwa/samse/functions.nf index da9da093..85628ee0 100644 --- a/modules/bwa/samse/functions.nf +++ b/modules/bwa/samse/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def 
saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 3fe8bdd8..82d23854 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BWA_SAMSE { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,6 +41,9 @@ process BWA_SAMSE { $sai \\ $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam - echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + END_VERSIONS """ } diff --git a/modules/bwa/samse/meta.yml b/modules/bwa/samse/meta.yml index 89917703..3c44741d 100644 --- a/modules/bwa/samse/meta.yml +++ b/modules/bwa/samse/meta.yml @@ -49,7 +49,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: BAM file diff --git a/modules/bwamem2/index/functions.nf b/modules/bwamem2/index/functions.nf index 
da9da093..85628ee0 100644 --- a/modules/bwamem2/index/functions.nf +++ b/modules/bwamem2/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwamem2/index/main.nf b/modules/bwamem2/index/main.nf index b667f266..f052d172 100644 --- a/modules/bwamem2/index/main.nf +++ b/modules/bwamem2/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process BWAMEM2_INDEX { output: path "bwamem2" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ mkdir bwamem2 bwa-mem2 index $options.args $fasta -p bwamem2/${fasta} - echo \$(bwa-mem2 version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwa-mem2 version 2>&1) + END_VERSIONS """ } diff --git a/modules/bwamem2/index/meta.yml b/modules/bwamem2/index/meta.yml index 9d717f73..ee84ccfc 100644 --- a/modules/bwamem2/index/meta.yml +++ b/modules/bwamem2/index/meta.yml @@ -24,6 +24,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/bwamem2/mem/functions.nf b/modules/bwamem2/mem/functions.nf index da9da093..85628ee0 100644 --- a/modules/bwamem2/mem/functions.nf +++ b/modules/bwamem2/mem/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 5d0ff617..2838cdda 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BWAMEM2_MEM { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def split_cpus = Math.floor(task.cpus/2) @@ -42,6 +42,9 @@ process BWAMEM2_MEM { $reads \\ | samtools view $options.args2 -@ ${split_cpus} -bhS -o ${prefix}.bam - - echo \$(bwa-mem2 version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwa-mem2 version 2>&1) + END_VERSIONS """ } diff --git a/modules/bwamem2/mem/meta.yml b/modules/bwamem2/mem/meta.yml index 2fc7713d..434fc7ca 100644 --- a/modules/bwamem2/mem/meta.yml +++ b/modules/bwamem2/mem/meta.yml @@ -39,6 +39,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/bwameth/align/functions.nf b/modules/bwameth/align/functions.nf index da9da093..85628ee0 100644 --- a/modules/bwameth/align/functions.nf +++ b/modules/bwameth/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if 
(!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index 0e8db58f..0f605bd1 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process BWAMETH_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def split_cpus = Math.floor(task.cpus/2) @@ -42,6 +42,9 @@ process BWAMETH_ALIGN { $reads \\ | samtools view $options.args2 -@ ${split_cpus} -bhS -o ${prefix}.bam - - echo \$(bwameth.py --version 2>&1) | cut -f2 -d" " > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwameth.py --version 2>&1 | cut -f2 -d" ") + END_VERSIONS """ } diff --git a/modules/bwameth/align/meta.yml b/modules/bwameth/align/meta.yml index ac578031..03bd66f7 100644 --- a/modules/bwameth/align/meta.yml +++ b/modules/bwameth/align/meta.yml @@ -46,6 +46,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bwameth/index/functions.nf b/modules/bwameth/index/functions.nf index da9da093..85628ee0 100644 --- 
a/modules/bwameth/index/functions.nf +++ b/modules/bwameth/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/bwameth/index/main.nf b/modules/bwameth/index/main.nf index dbea0ae4..7b75d328 100644 --- a/modules/bwameth/index/main.nf +++ b/modules/bwameth/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process BWAMETH_INDEX { output: path "bwameth" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ bwameth.py index $fasta - echo \$(bwameth.py --version 2>&1) | cut -f2 -d" " > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bwameth.py --version 2>&1 | cut -f2 -d" ") + END_VERSIONS """ } diff --git a/modules/bwameth/index/meta.yml b/modules/bwameth/index/meta.yml index 79c54862..b07dbde5 100644 --- a/modules/bwameth/index/meta.yml +++ b/modules/bwameth/index/meta.yml @@ -27,6 +27,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/cat/cat/functions.nf b/modules/cat/cat/functions.nf index da9da093..85628ee0 100644 --- a/modules/cat/cat/functions.nf +++ b/modules/cat/cat/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index 1c7dbd7c..2dc9944f 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process CAT_CAT { output: path "${file_out}*" , emit: file_out - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def file_list = files_in.collect { it.toString() } @@ -47,7 +47,10 @@ process CAT_CAT { $command2 \\ > $file_out - echo \$(pigz --version 2>&1) | sed 's/pigz //g' > pigz.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS """ } } diff --git a/modules/cat/cat/meta.yml b/modules/cat/cat/meta.yml index a1318b19..d283107e 100644 --- a/modules/cat/cat/meta.yml +++ b/modules/cat/cat/meta.yml @@ -24,7 +24,7 @@ output: - version: type: file description: File containing version of the pigz software - pattern: "*.{version.txt}" + pattern: "versions.yml" - file_out: type: file description: Concatenated file. 
Will be gzipped if file_out ends with ".gz" diff --git a/modules/cat/fastq/functions.nf b/modules/cat/fastq/functions.nf index da9da093..85628ee0 100644 --- a/modules/cat/fastq/functions.nf +++ b/modules/cat/fastq/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/chromap/chromap/functions.nf b/modules/chromap/chromap/functions.nf index da9da093..85628ee0 100644 --- a/modules/chromap/chromap/functions.nf +++ b/modules/chromap/chromap/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index be60c6bd..cbee7fc0 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -34,7 +34,7 @@ process CHROMAP_CHROMAP { tuple val(meta), path("*.bam") , optional:true, emit: bam tuple val(meta), path("*.tagAlign.gz"), optional:true, emit: tagAlign tuple val(meta), path("*.pairs.gz") , optional:true, emit: pairs - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -75,7 +75,10 @@ process CHROMAP_CHROMAP { -1 ${reads.join(',')} \\ -o ${prefix}.${file_extension} - echo "$VERSION" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo "$VERSION") + END_VERSIONS """ + compression_cmds } else { """ @@ -87,7 +90,10 @@ process CHROMAP_CHROMAP { -2 ${reads[1]} \\ -o ${prefix}.${file_extension} - echo "$VERSION" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo "$VERSION") + END_VERSIONS """ + compression_cmds } } diff --git a/modules/chromap/chromap/meta.yml b/modules/chromap/chromap/meta.yml index c088ab35..d52e4202 100644 --- a/modules/chromap/chromap/meta.yml +++ b/modules/chromap/chromap/meta.yml @@ -66,7 +66,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bed: type: file description: BED file diff --git a/modules/chromap/index/functions.nf b/modules/chromap/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/chromap/index/functions.nf +++ b/modules/chromap/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta 
instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index c8a75935..764eefe1 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process CHROMAP_INDEX { output: path "*.index" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process CHROMAP_INDEX { -t $task.cpus \\ -r $fasta \\ -o ${prefix}.index - echo "$VERSION" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo "$VERSION") + END_VERSIONS """ } diff --git a/modules/chromap/index/meta.yml b/modules/chromap/index/meta.yml index 6a86fbeb..0b3aba75 100644 --- a/modules/chromap/index/meta.yml +++ b/modules/chromap/index/meta.yml @@ -23,7 +23,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - index: type: file description: Index file of the reference genome diff --git a/modules/cnvkit/functions.nf b/modules/cnvkit/functions.nf index da9da093..85628ee0 100755 --- a/modules/cnvkit/functions.nf +++ b/modules/cnvkit/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + 
return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/cnvkit/main.nf b/modules/cnvkit/main.nf index 4416919e..1219584c 100755 --- a/modules/cnvkit/main.nf +++ b/modules/cnvkit/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process CNVKIT { tuple val(meta), path("*.cnn"), emit: cnn tuple val(meta), path("*.cnr"), emit: cnr tuple val(meta), path("*.cns"), emit: cns - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,6 +41,9 @@ process CNVKIT { --targets $targetfile \\ $options.args - echo \$(cnvkit.py version) | sed -e "s/cnvkit v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cnvkit.py version | sed -e "s/cnvkit v//g") + END_VERSIONS """ } diff --git a/modules/cnvkit/meta.yml b/modules/cnvkit/meta.yml index d3d81ecc..5094308f 100755 --- a/modules/cnvkit/meta.yml +++ b/modules/cnvkit/meta.yml @@ -78,7 +78,7 @@ output: - version: 
type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@kaurravneet4123" - "@KevinMenden" diff --git a/modules/cooler/digest/functions.nf b/modules/cooler/digest/functions.nf index da9da093..85628ee0 100644 --- a/modules/cooler/digest/functions.nf +++ b/modules/cooler/digest/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/cooler/digest/main.nf b/modules/cooler/digest/main.nf index bb4081d9..ee8b347e 100644 --- a/modules/cooler/digest/main.nf +++ b/modules/cooler/digest/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process COOLER_DIGEST { output: path "*.bed" , emit: bed - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process COOLER_DIGEST { $fasta \\ $enzyme - echo \$(cooler --version 2>&1) | sed 's/cooler, version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS """ } diff --git a/modules/cooler/digest/meta.yml b/modules/cooler/digest/meta.yml index 4623adda..f46fbaff 100644 --- a/modules/cooler/digest/meta.yml +++ b/modules/cooler/digest/meta.yml @@ -29,7 +29,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bed: type: file description: A genome segmentation of restriction fragments as a BED file. diff --git a/modules/cooler/dump/functions.nf b/modules/cooler/dump/functions.nf index da9da093..85628ee0 100644 --- a/modules/cooler/dump/functions.nf +++ b/modules/cooler/dump/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index c381722c..7d456107 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process COOLER_DUMP { output: tuple val(meta), path("*.bedpe"), emit: bedpe - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process COOLER_DUMP { -o ${prefix}.bedpe \\ $cool - echo \$(cooler --version 2>&1) | sed 's/cooler, version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS """ } diff --git a/modules/cooler/dump/meta.yml b/modules/cooler/dump/meta.yml index 0b2fcc60..ab2d0356 100644 --- a/modules/cooler/dump/meta.yml +++ b/modules/cooler/dump/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bedpe: type: file description: Output text file diff --git a/modules/cutadapt/functions.nf b/modules/cutadapt/functions.nf index da9da093..85628ee0 100644 --- a/modules/cutadapt/functions.nf +++ b/modules/cutadapt/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) 
{ - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/cutadapt/main.nf b/modules/cutadapt/main.nf index 6dccc2bc..3baf9c7f 100644 --- a/modules/cutadapt/main.nf +++ b/modules/cutadapt/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process CUTADAPT { output: tuple val(meta), path('*.trim.fastq.gz'), emit: reads tuple val(meta), path('*.log') , emit: log - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process CUTADAPT { $trimmed \\ $reads \\ > ${prefix}.cutadapt.log - echo \$(cutadapt --version) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cutadapt --version) + END_VERSIONS """ } diff --git a/modules/cutadapt/meta.yml b/modules/cutadapt/meta.yml index 14652343..87276306 100644 --- a/modules/cutadapt/meta.yml +++ b/modules/cutadapt/meta.yml @@ -39,7 +39,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/damageprofiler/functions.nf b/modules/damageprofiler/functions.nf index da9da093..85628ee0 100644 --- a/modules/damageprofiler/functions.nf +++ 
b/modules/damageprofiler/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/damageprofiler/main.nf b/modules/damageprofiler/main.nf index b370ae3b..cbb27944 100644 --- a/modules/damageprofiler/main.nf +++ b/modules/damageprofiler/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process DAMAGEPROFILER { output: tuple val(meta), path("${prefix}"), emit: results - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process DAMAGEPROFILER { $options.args - echo \$(damageprofiler -v) | sed 's/^DamageProfiler v//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(damageprofiler -v | sed 's/^DamageProfiler v//') + END_VERSIONS """ } diff --git a/modules/damageprofiler/meta.yml b/modules/damageprofiler/meta.yml index b32b9bff..9451f1b2 100644 --- a/modules/damageprofiler/meta.yml +++ b/modules/damageprofiler/meta.yml @@ -43,7 +43,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - results: type: dir description: DamageProfiler results directory diff --git a/modules/deeptools/computematrix/functions.nf b/modules/deeptools/computematrix/functions.nf index da9da093..85628ee0 100644 --- a/modules/deeptools/computematrix/functions.nf +++ b/modules/deeptools/computematrix/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/deeptools/computematrix/main.nf b/modules/deeptools/computematrix/main.nf index 739e7cc1..21a18526 100644 --- a/modules/deeptools/computematrix/main.nf +++ b/modules/deeptools/computematrix/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process DEEPTOOLS_COMPUTEMATRIX { output: tuple val(meta), path("*.mat.gz") , emit: matrix tuple val(meta), path("*.mat.tab"), emit: table - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process DEEPTOOLS_COMPUTEMATRIX { --outFileNameMatrix ${prefix}.computeMatrix.vals.mat.tab \\ --numberOfProcessors $task.cpus - computeMatrix --version | sed -e "s/computeMatrix //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(computeMatrix --version | sed -e "s/computeMatrix //g") + END_VERSIONS """ } diff --git a/modules/deeptools/computematrix/meta.yml b/modules/deeptools/computematrix/meta.yml index d6fd78c7..e3b0282d 100644 --- a/modules/deeptools/computematrix/meta.yml +++ b/modules/deeptools/computematrix/meta.yml @@ -49,7 +49,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@jeremy1805" diff --git a/modules/deeptools/plotfingerprint/functions.nf b/modules/deeptools/plotfingerprint/functions.nf index da9da093..85628ee0 100644 --- a/modules/deeptools/plotfingerprint/functions.nf +++ b/modules/deeptools/plotfingerprint/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return 
task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/deeptools/plotfingerprint/main.nf b/modules/deeptools/plotfingerprint/main.nf index 56ecb688..9271a399 100644 --- a/modules/deeptools/plotfingerprint/main.nf +++ b/modules/deeptools/plotfingerprint/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process DEEPTOOLS_PLOTFINGERPRINT { tuple val(meta), path("*.pdf") , emit: pdf tuple val(meta), path("*.raw.txt") , emit: matrix tuple val(meta), path("*.qcmetrics.txt"), emit: metrics - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,6 +41,9 @@ process DEEPTOOLS_PLOTFINGERPRINT { --outQualityMetrics ${prefix}.plotFingerprint.qcmetrics.txt \\ --numberOfProcessors $task.cpus - plotFingerprint --version | sed -e "s/plotFingerprint //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") + 
END_VERSIONS """ } diff --git a/modules/deeptools/plotfingerprint/meta.yml b/modules/deeptools/plotfingerprint/meta.yml index 6b6f9d8e..6ba88882 100644 --- a/modules/deeptools/plotfingerprint/meta.yml +++ b/modules/deeptools/plotfingerprint/meta.yml @@ -53,7 +53,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@emiller88" diff --git a/modules/deeptools/plotheatmap/functions.nf b/modules/deeptools/plotheatmap/functions.nf index da9da093..85628ee0 100644 --- a/modules/deeptools/plotheatmap/functions.nf +++ b/modules/deeptools/plotheatmap/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/deeptools/plotheatmap/main.nf b/modules/deeptools/plotheatmap/main.nf index 8e25d96f..49362666 100644 --- a/modules/deeptools/plotheatmap/main.nf +++ b/modules/deeptools/plotheatmap/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process DEEPTOOLS_PLOTHEATMAP { output: tuple val(meta), path("*.pdf"), emit: pdf tuple val(meta), path("*.tab"), emit: table - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process DEEPTOOLS_PLOTHEATMAP { --outFileName ${prefix}.plotHeatmap.pdf \\ --outFileNameMatrix ${prefix}.plotHeatmap.mat.tab - plotHeatmap --version | sed -e "s/plotHeatmap //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") + END_VERSIONS """ } diff --git a/modules/deeptools/plotheatmap/meta.yml b/modules/deeptools/plotheatmap/meta.yml index 14311332..97af67f6 100644 --- a/modules/deeptools/plotheatmap/meta.yml +++ b/modules/deeptools/plotheatmap/meta.yml @@ -47,7 +47,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@emiller88" diff --git a/modules/deeptools/plotprofile/functions.nf b/modules/deeptools/plotprofile/functions.nf index da9da093..85628ee0 100644 --- a/modules/deeptools/plotprofile/functions.nf +++ b/modules/deeptools/plotprofile/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/deeptools/plotprofile/main.nf b/modules/deeptools/plotprofile/main.nf index 95f65c84..cba8e161 100644 --- a/modules/deeptools/plotprofile/main.nf +++ b/modules/deeptools/plotprofile/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process DEEPTOOLS_PLOTPROFILE { output: tuple val(meta), path("*.pdf"), emit: pdf tuple val(meta), path("*.tab"), emit: table - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process DEEPTOOLS_PLOTPROFILE { --outFileName ${prefix}.plotProfile.pdf \\ --outFileNameData ${prefix}.plotProfile.tab - plotProfile --version | sed -e "s/plotProfile //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(plotProfile --version | sed -e "s/plotProfile //g") + END_VERSIONS """ } diff --git a/modules/deeptools/plotprofile/meta.yml b/modules/deeptools/plotprofile/meta.yml index 120280de..08fafa49 100644 --- a/modules/deeptools/plotprofile/meta.yml +++ b/modules/deeptools/plotprofile/meta.yml @@ -47,7 +47,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@emiller88" diff --git a/modules/delly/call/functions.nf b/modules/delly/call/functions.nf index da9da093..85628ee0 100644 --- a/modules/delly/call/functions.nf +++ b/modules/delly/call/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return 
task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/delly/call/main.nf b/modules/delly/call/main.nf index 3bbda48a..f97ddeb0 100644 --- a/modules/delly/call/main.nf +++ b/modules/delly/call/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process DELLY_CALL { output: tuple val(meta), path("*.bcf"), emit: bcf tuple val(meta), path("*.csi"), emit: csi - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process DELLY_CALL { -g $fasta \\ $bam \\ - echo \$(delly --version 2>&1) | sed 's/^.*Delly //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(delly --version 2>&1 | sed 's/^.*Delly //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/delly/call/meta.yml b/modules/delly/call/meta.yml index 9fb79959..16d1a6f2 100644 --- a/modules/delly/call/meta.yml +++ b/modules/delly/call/meta.yml @@ -41,7 +41,7 @@ output: 
- version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bcf: type: file description: BCF format diff --git a/modules/diamond/blastp/functions.nf b/modules/diamond/blastp/functions.nf index da9da093..85628ee0 100644 --- a/modules/diamond/blastp/functions.nf +++ b/modules/diamond/blastp/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 88ace780..556f150c 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process DIAMOND_BLASTP { output: tuple val(meta), path('*.txt'), emit: txt - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process DIAMOND_BLASTP { $options.args \\ --out ${prefix}.txt - echo \$(diamond --version 2>&1) | tail -n 1 | sed 's/^diamond version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + END_VERSIONS """ } diff --git a/modules/diamond/blastp/meta.yml b/modules/diamond/blastp/meta.yml index b6e82f95..e92b1594 100644 --- a/modules/diamond/blastp/meta.yml +++ b/modules/diamond/blastp/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@spficklin" diff --git a/modules/diamond/blastx/functions.nf b/modules/diamond/blastx/functions.nf index da9da093..85628ee0 100644 --- a/modules/diamond/blastx/functions.nf +++ b/modules/diamond/blastx/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index cd9e4838..8b0227a2 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process DIAMOND_BLASTX { output: tuple val(meta), path('*.txt'), emit: txt - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process DIAMOND_BLASTX { $options.args \\ --out ${prefix}.txt - echo \$(diamond --version 2>&1) | tail -n 1 | sed 's/^diamond version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + END_VERSIONS """ } diff --git a/modules/diamond/blastx/meta.yml b/modules/diamond/blastx/meta.yml index d9670bed..6e92a336 100644 --- a/modules/diamond/blastx/meta.yml +++ b/modules/diamond/blastx/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@spficklin" diff --git a/modules/diamond/makedb/functions.nf b/modules/diamond/makedb/functions.nf index da9da093..85628ee0 100644 --- a/modules/diamond/makedb/functions.nf +++ b/modules/diamond/makedb/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // 
Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/diamond/makedb/main.nf b/modules/diamond/makedb/main.nf index 3537d0aa..27383955 100644 --- a/modules/diamond/makedb/main.nf +++ b/modules/diamond/makedb/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process DIAMOND_MAKEDB { output: path "${fasta}.dmnd", emit: db - path '*.version.txt', emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process DIAMOND_MAKEDB { -d $fasta \\ $options.args - echo \$(diamond --version 2>&1) | tail -n 1 | sed 's/^diamond version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + END_VERSIONS """ } diff --git a/modules/diamond/makedb/meta.yml b/modules/diamond/makedb/meta.yml index edb63fab..4d8cb695 100644 --- a/modules/diamond/makedb/meta.yml +++ b/modules/diamond/makedb/meta.yml @@ -28,7 +28,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@spficklin" diff --git a/modules/dragonflye/functions.nf 
b/modules/dragonflye/functions.nf index da9da093..85628ee0 100644 --- a/modules/dragonflye/functions.nf +++ b/modules/dragonflye/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/dragonflye/main.nf b/modules/dragonflye/main.nf index cd0195e9..090c9a13 100644 --- a/modules/dragonflye/main.nf +++ b/modules/dragonflye/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process DRAGONFLYE { tuple val(meta), path("{flye,miniasm,raven}.fasta") , emit: raw_contigs tuple val(meta), path("{miniasm,raven}-unpolished.gfa"), optional:true , emit: gfa tuple val(meta), path("flye-info.txt"), optional:true , emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -40,6 +40,9 @@ process DRAGONFLYE { --ram $memory \\ --outdir ./ \\ --force - echo \$(dragonflye --version 2>&1) | sed 's/^.*dragonflye //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(dragonflye --version 2>&1 | sed 's/^.*dragonflye //' ) + END_VERSIONS """ } diff --git a/modules/dragonflye/meta.yml b/modules/dragonflye/meta.yml index a2bf2703..9affa2f3 100644 --- a/modules/dragonflye/meta.yml +++ b/modules/dragonflye/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - contigs: type: file description: The final assembly produced by Dragonflye diff --git a/modules/dshbio/exportsegments/functions.nf b/modules/dshbio/exportsegments/functions.nf index da9da093..85628ee0 100644 --- a/modules/dshbio/exportsegments/functions.nf +++ b/modules/dshbio/exportsegments/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index bf4c9699..6016f777 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process DSHBIO_EXPORTSEGMENTS { output: tuple val(meta), path("*.fa"), emit: fasta - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process DSHBIO_EXPORTSEGMENTS { -i $gfa \\ -o ${prefix}.fa - echo \$(dsh-bio --version 2>&1) | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + END_VERSIONS """ } diff --git a/modules/dshbio/exportsegments/meta.yml b/modules/dshbio/exportsegments/meta.yml index c064527e..c57a6179 100644 --- a/modules/dshbio/exportsegments/meta.yml +++ b/modules/dshbio/exportsegments/meta.yml @@ -35,6 +35,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/dshbio/filterbed/functions.nf b/modules/dshbio/filterbed/functions.nf index da9da093..85628ee0 100644 --- a/modules/dshbio/filterbed/functions.nf +++ b/modules/dshbio/filterbed/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index cc1daa7d..3f2a068d 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process DSHBIO_FILTERBED { output: tuple val(meta), path("*.bed.gz"), emit: bed - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process DSHBIO_FILTERBED { -i $bed \\ -o ${prefix}.bed.gz - echo \$(dsh-bio --version 2>&1) | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + 
END_VERSIONS """ } diff --git a/modules/dshbio/filterbed/meta.yml b/modules/dshbio/filterbed/meta.yml index 61626ead..5545aac1 100644 --- a/modules/dshbio/filterbed/meta.yml +++ b/modules/dshbio/filterbed/meta.yml @@ -33,6 +33,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/dshbio/filtergff3/functions.nf b/modules/dshbio/filtergff3/functions.nf index da9da093..85628ee0 100644 --- a/modules/dshbio/filtergff3/functions.nf +++ b/modules/dshbio/filtergff3/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index 596c6b8f..2a1ad816 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process DSHBIO_FILTERGFF3 { output: tuple val(meta), path("*.gff3.gz"), emit: gff3 - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process DSHBIO_FILTERGFF3 { -i $gff3 \\ -o ${prefix}.gff3.gz - echo \$(dsh-bio --version 2>&1) | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + END_VERSIONS """ } diff --git a/modules/dshbio/filtergff3/meta.yml b/modules/dshbio/filtergff3/meta.yml index 3f89d71b..d1b7a509 100644 --- a/modules/dshbio/filtergff3/meta.yml +++ b/modules/dshbio/filtergff3/meta.yml @@ -33,6 +33,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/dshbio/splitbed/functions.nf b/modules/dshbio/splitbed/functions.nf index da9da093..85628ee0 100644 --- a/modules/dshbio/splitbed/functions.nf +++ b/modules/dshbio/splitbed/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 75307b14..388ba0ef 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process DSHBIO_SPLITBED { output: tuple val(meta), path("*.bed.gz"), emit: bed - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process DSHBIO_SPLITBED { -s '.bed.gz' \\ -i $bed - echo \$(dsh-bio --version 2>&1) | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + END_VERSIONS """ } diff --git a/modules/dshbio/splitbed/meta.yml b/modules/dshbio/splitbed/meta.yml index f2257812..0c4788a1 100644 --- a/modules/dshbio/splitbed/meta.yml +++ b/modules/dshbio/splitbed/meta.yml @@ -33,6 +33,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/dshbio/splitgff3/functions.nf b/modules/dshbio/splitgff3/functions.nf index da9da093..85628ee0 100644 --- a/modules/dshbio/splitgff3/functions.nf +++ b/modules/dshbio/splitgff3/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index fa434b75..b8f81392 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process DSHBIO_SPLITGFF3 { output: tuple val(meta), path("*.gff3.gz"), emit: gff3 - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process DSHBIO_SPLITGFF3 { -s '.gff3.gz' \\ -i $gff3 - echo \$(dsh-bio --version 2>&1) | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + END_VERSIONS """ } diff --git a/modules/dshbio/splitgff3/meta.yml b/modules/dshbio/splitgff3/meta.yml index 46118bb7..1bdfa652 100644 --- a/modules/dshbio/splitgff3/meta.yml +++ b/modules/dshbio/splitgff3/meta.yml @@ -33,6 +33,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" 
authors: - "@heuermh" diff --git a/modules/ensemblvep/functions.nf b/modules/ensemblvep/functions.nf index da9da093..85628ee0 100644 --- a/modules/ensemblvep/functions.nf +++ b/modules/ensemblvep/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index c2194e77..32acc4dd 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -33,7 +33,7 @@ process ENSEMBLVEP { output: tuple val(meta), path("*.ann.vcf"), emit: vcf path "*.summary.html" , emit: report - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -57,6 +57,9 @@ process ENSEMBLVEP { rm -rf $prefix - echo \$(vep --help 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(vep --help 2>&1) + END_VERSIONS """ } diff --git a/modules/ensemblvep/meta.yml b/modules/ensemblvep/meta.yml index 5eb111e9..e97c5609 100644 --- a/modules/ensemblvep/meta.yml +++ b/modules/ensemblvep/meta.yml @@ -59,6 +59,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/expansionhunter/functions.nf b/modules/expansionhunter/functions.nf index da9da093..85628ee0 100644 --- a/modules/expansionhunter/functions.nf +++ b/modules/expansionhunter/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 41c6ed6c..7ee97c5a 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process EXPANSIONHUNTER { output: tuple val(meta), path("*.vcf"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -40,6 +40,9 @@ process EXPANSIONHUNTER { --variant-catalog $variant_catalog \\ --sex $gender - echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(ExpansionHunter --version 2>&1 | sed 's/^.*ExpansionHunter //') + END_VERSIONS """ } diff --git a/modules/expansionhunter/meta.yml b/modules/expansionhunter/meta.yml index cac3ed2b..a5733d93 100644 --- a/modules/expansionhunter/meta.yml +++ b/modules/expansionhunter/meta.yml @@ -40,7 +40,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - vcf: type: file description: VCF with repeat expansions diff --git a/modules/fastani/functions.nf b/modules/fastani/functions.nf index da9da093..85628ee0 100644 --- a/modules/fastani/functions.nf +++ b/modules/fastani/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/fastani/main.nf b/modules/fastani/main.nf index 11916a65..7ee35a0d 100644 --- a/modules/fastani/main.nf +++ b/modules/fastani/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process FASTANI { output: tuple val(meta), path("*.ani.txt"), emit: ani - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,7 +37,10 @@ process FASTANI { -rl $reference \\ -o ${prefix}.ani.txt - echo \$(fastANI --version 2>&1) | sed 's/version//;' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(fastANI --version 2>&1 | sed 's/version//;') + END_VERSIONS """ } else { """ @@ -46,7 +49,10 @@ process FASTANI { -r $reference \\ -o ${prefix}.ani.txt - echo \$(fastANI --version 2>&1) | sed 's/version//;' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(fastANI --version 2>&1 | sed 's/version//;') + END_VERSIONS """ } } diff --git a/modules/fastani/meta.yml b/modules/fastani/meta.yml index ed6be165..783ae068 
100644 --- a/modules/fastani/meta.yml +++ b/modules/fastani/meta.yml @@ -38,6 +38,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/fastp/functions.nf b/modules/fastp/functions.nf index da9da093..85628ee0 100644 --- a/modules/fastp/functions.nf +++ b/modules/fastp/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index 652ffe80..11cd30b4 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process FASTP { tuple val(meta), path('*.json') , emit: json tuple val(meta), path('*.html') , emit: html tuple val(meta), path('*.log') , emit: log - path '*.version.txt' , emit: version + path "versions.yml" , emit: version tuple val(meta), path('*.fail.fastq.gz') , optional:true, emit: reads_fail tuple val(meta), path('*.merged.fastq.gz'), optional:true, emit: reads_merged @@ -49,7 +49,10 @@ process FASTP { $fail_fastq \\ $options.args \\ 2> ${prefix}.fastp.log - echo \$(fastp --version 2>&1) | sed -e "s/fastp //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(fastp --version 2>&1 | sed -e "s/fastp //g") + END_VERSIONS """ } else { def fail_fastq = save_trimmed_fail ? 
"--unpaired1 ${prefix}_1.fail.fastq.gz --unpaired2 ${prefix}_2.fail.fastq.gz" : '' @@ -71,7 +74,10 @@ process FASTP { $options.args \\ 2> ${prefix}.fastp.log - echo \$(fastp --version 2>&1) | sed -e "s/fastp //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(fastp --version 2>&1 | sed -e "s/fastp //g") + END_VERSIONS """ } } diff --git a/modules/fastp/meta.yml b/modules/fastp/meta.yml index d9130d6d..72ddb7d7 100644 --- a/modules/fastp/meta.yml +++ b/modules/fastp/meta.yml @@ -47,7 +47,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - reads_fail: type: file description: Reads the failed the preprocessing diff --git a/modules/fasttree/functions.nf b/modules/fasttree/functions.nf index da9da093..85628ee0 100644 --- a/modules/fasttree/functions.nf +++ b/modules/fasttree/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/fasttree/main.nf b/modules/fasttree/main.nf index f0bacb87..08c093b2 100644 --- a/modules/fasttree/main.nf +++ b/modules/fasttree/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -22,7 +22,7 @@ process FASTTREE { output: path "*.tre", emit: phylogeny - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process FASTTREE { -nt $alignment \\ > fasttree_phylogeny.tre - echo \$(fasttree -help 2>&1) | head -1 | sed 's/^FastTree \\([0-9\\.]*\\) .*\$/\\1/' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(fasttree -help 2>&1 | head -1 | sed 's/^FastTree \\([0-9\\.]*\\) .*\$/\\1/') + END_VERSIONS """ } diff --git a/modules/fasttree/meta.yml b/modules/fasttree/meta.yml index 5912395d..70000030 100644 --- a/modules/fasttree/meta.yml +++ b/modules/fasttree/meta.yml @@ -22,7 +22,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - phylogeny: type: file description: A phylogeny in Newick format diff --git a/modules/fgbio/callmolecularconsensusreads/functions.nf b/modules/fgbio/callmolecularconsensusreads/functions.nf index da9da093..85628ee0 100644 --- a/modules/fgbio/callmolecularconsensusreads/functions.nf +++ b/modules/fgbio/callmolecularconsensusreads/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/fgbio/callmolecularconsensusreads/main.nf b/modules/fgbio/callmolecularconsensusreads/main.nf index a530ba55..ba099d8d 100644 --- a/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/modules/fgbio/callmolecularconsensusreads/main.nf @@ -1,4 +1,4 @@ -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -22,7 +22,7 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { -i $bam \\ $options.args \\ -o ${prefix}.bam - fgbio --version | sed -e "s/fgbio v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(fgbio --version | sed -e "s/fgbio v//g") + END_VERSIONS """ } diff --git a/modules/fgbio/callmolecularconsensusreads/meta.yml b/modules/fgbio/callmolecularconsensusreads/meta.yml index 5f80bce9..3e62c3a6 100644 --- a/modules/fgbio/callmolecularconsensusreads/meta.yml +++ b/modules/fgbio/callmolecularconsensusreads/meta.yml @@ -39,7 +39,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@sruthipsuresh" diff --git a/modules/fgbio/sortbam/functions.nf b/modules/fgbio/sortbam/functions.nf index da9da093..85628ee0 100644 --- a/modules/fgbio/sortbam/functions.nf +++ b/modules/fgbio/sortbam/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/fgbio/sortbam/main.nf b/modules/fgbio/sortbam/main.nf index 798d5f23..81ac89c2 100644 --- a/modules/fgbio/sortbam/main.nf +++ b/modules/fgbio/sortbam/main.nf @@ -1,4 +1,4 @@ -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -22,7 +22,7 @@ process FGBIO_SORTBAM { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,7 +33,10 @@ process FGBIO_SORTBAM { -i $bam \\ $options.args \\ -o ${prefix}.bam - fgbio --version | sed -e "s/fgbio v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(fgbio --version | sed -e "s/fgbio v//g") + END_VERSIONS """ } diff --git a/modules/fgbio/sortbam/meta.yml b/modules/fgbio/sortbam/meta.yml index 9e68dac4..def106c3 100644 --- a/modules/fgbio/sortbam/meta.yml +++ b/modules/fgbio/sortbam/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@sruthipsuresh" diff --git 
a/modules/flash/functions.nf b/modules/flash/functions.nf index da9da093..85628ee0 100644 --- a/modules/flash/functions.nf +++ b/modules/flash/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/flash/main.nf b/modules/flash/main.nf index acdc10a7..8b8d99e4 100644 --- a/modules/flash/main.nf +++ b/modules/flash/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -22,7 +22,7 @@ process FLASH { output: tuple val(meta), path("*.merged.*.fastq.gz"), emit: reads - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process FLASH { $merged \\ -z \\ $input_reads - echo \$(flash --version) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(flash --version) + END_VERSIONS """ } diff --git a/modules/flash/meta.yml b/modules/flash/meta.yml index ff747912..62d40e20 100644 --- a/modules/flash/meta.yml +++ b/modules/flash/meta.yml @@ -38,7 +38,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@Erkison" diff --git a/modules/gatk4/applybqsr/functions.nf b/modules/gatk4/applybqsr/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/applybqsr/functions.nf +++ b/modules/gatk4/applybqsr/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index 19b8c3d6..9c51ce60 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process GATK4_APPLYBQSR { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process GATK4_APPLYBQSR { -O ${prefix}.bam \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index 9bf12f09..b0177c76 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -48,7 +48,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: Recalibrated BAM file diff --git a/modules/gatk4/baserecalibrator/functions.nf b/modules/gatk4/baserecalibrator/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/baserecalibrator/functions.nf +++ b/modules/gatk4/baserecalibrator/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 
+44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 4e2730f2..9abca6e9 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -29,7 +29,7 @@ process GATK4_BASERECALIBRATOR { output: tuple val(meta), path("*.table"), emit: table - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -45,6 +45,9 @@ process GATK4_BASERECALIBRATOR { $options.args \ -O ${prefix}.table - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/baserecalibrator/meta.yml b/modules/gatk4/baserecalibrator/meta.yml index 0996dcbe..a5bac064 100644 --- a/modules/gatk4/baserecalibrator/meta.yml +++ b/modules/gatk4/baserecalibrator/meta.yml @@ -48,7 +48,7 @@ output: - version: type: file description: File containing software version - 
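The END_VERSIONS heredoc used throughout these modules writes a two-level YAML map per task: the fully qualified module name as the outer key, the tool name and its captured version underneath. A minimal sketch of the equivalent structure for the baserecalibrator module (illustrative only; the version value is a placeholder, not taken from this patch):

    // Illustrative only: the YAML written by END_VERSIONS corresponds to a small
    // nested map. getProcessName(task.process) supplies the outer key,
    // getSoftwareName(task.process) the inner key, and the value is whatever
    // `gatk --version` reports at run time.
    def versions = [ 'GATK4_BASERECALIBRATOR': [ 'gatk4': '4.2.0.0' ] ]
    assert versions['GATK4_BASERECALIBRATOR'].containsKey('gatk4')
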
pattern: "*.{version.txt}" + pattern: "versions.yml" - table: type: file description: Recalibration table from BaseRecalibrator diff --git a/modules/gatk4/bedtointervallist/functions.nf b/modules/gatk4/bedtointervallist/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/bedtointervallist/functions.nf +++ b/modules/gatk4/bedtointervallist/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index af385f8f..fc484f84 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process GATK4_BEDTOINTERVALLIST { output: tuple val(meta), path('*.interval_list'), emit: interval_list - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process GATK4_BEDTOINTERVALLIST { -O ${prefix}.interval_list \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/bedtointervallist/meta.yml b/modules/gatk4/bedtointervallist/meta.yml index 23e98447..28fd5d22 100644 --- a/modules/gatk4/bedtointervallist/meta.yml +++ b/modules/gatk4/bedtointervallist/meta.yml @@ -34,6 +34,6 @@ output: - version: type: file description: File containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/createsequencedictionary/functions.nf b/modules/gatk4/createsequencedictionary/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/createsequencedictionary/functions.nf +++ b/modules/gatk4/createsequencedictionary/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 0276e8b2..0c0446c6 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process GATK4_CREATESEQUENCEDICTIONARY { output: path "*.dict" , emit: dict - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -40,6 +40,9 @@ process GATK4_CREATESEQUENCEDICTIONARY { --URI $fasta \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/createsequencedictionary/meta.yml b/modules/gatk4/createsequencedictionary/meta.yml index d0cc5980..21bdc599 100644 --- a/modules/gatk4/createsequencedictionary/meta.yml +++ b/modules/gatk4/createsequencedictionary/meta.yml @@ -25,6 +25,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/gatk4/fastqtosam/functions.nf b/modules/gatk4/fastqtosam/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/fastqtosam/functions.nf +++ b/modules/gatk4/fastqtosam/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + 
return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index e00f34f5..e7b38f35 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process GATK4_FASTQTOSAM { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process GATK4_FASTQTOSAM { -SM $prefix \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/fastqtosam/meta.yml b/modules/gatk4/fastqtosam/meta.yml index d574d67b..4ae9eeaa 100644 --- a/modules/gatk4/fastqtosam/meta.yml +++ 
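Because every module now emits the same versions.yml filename under the version emit, a consuming workflow can merge them into one channel. A minimal sketch, assuming the modules have already been invoked in a DSL2 workflow (the aggregation step itself is not part of this patch and the output filename is hypothetical):

    // Collect per-task versions.yml snippets into a single file for reporting
    ch_versions = Channel.empty()
    ch_versions = ch_versions.mix(GATK4_FASTQTOSAM.out.version)
    ch_versions = ch_versions.mix(GATK4_BASERECALIBRATOR.out.version)
    ch_versions.collectFile(name: 'software_versions.yml')
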
b/modules/gatk4/fastqtosam/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: Converted BAM file diff --git a/modules/gatk4/getpileupsummaries/functions.nf b/modules/gatk4/getpileupsummaries/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/getpileupsummaries/functions.nf +++ b/modules/gatk4/getpileupsummaries/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 9ae95d9c..da03555c 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process GATK4_GETPILEUPSUMMARIES { output: tuple val(meta), path('*.pileups.table'), emit: table - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,6 +43,9 @@ process GATK4_GETPILEUPSUMMARIES { -O ${prefix}.pileups.table \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/getpileupsummaries/meta.yml b/modules/gatk4/getpileupsummaries/meta.yml index e784595a..5bb87e80 100644 --- a/modules/gatk4/getpileupsummaries/meta.yml +++ b/modules/gatk4/getpileupsummaries/meta.yml @@ -51,7 +51,7 @@ output: - version: type: file description: File containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@GCJMackenzie" diff --git a/modules/gatk4/haplotypecaller/functions.nf b/modules/gatk4/haplotypecaller/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/haplotypecaller/functions.nf +++ b/modules/gatk4/haplotypecaller/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index c451de7f..02fd1ee3 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process GATK4_HAPLOTYPECALLER { output: tuple val(meta), path("*.vcf.gz"), emit: vcf tuple val(meta), path("*.tbi") , emit: tbi - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -47,6 +47,9 @@ process GATK4_HAPLOTYPECALLER { -O ${prefix}.vcf.gz \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/haplotypecaller/meta.yml b/modules/gatk4/haplotypecaller/meta.yml index f0fc3910..4b8e8387 100644 --- a/modules/gatk4/haplotypecaller/meta.yml +++ b/modules/gatk4/haplotypecaller/meta.yml @@ -49,7 +49,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - vcf: type: file description: Compressed VCF file diff --git a/modules/gatk4/intervallisttools/functions.nf b/modules/gatk4/intervallisttools/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/intervallisttools/functions.nf +++ b/modules/gatk4/intervallisttools/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def 
getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index b2486eac..90a77c5a 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process GATK4_INTERVALLISTTOOLS { output: tuple val(meta), path("*_split/*/*.interval_list"), emit: interval_list - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -48,6 +48,9 @@ process GATK4_INTERVALLISTTOOLS { os.rename(interval, newName) CODE - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/intervallisttools/meta.yml 
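The two helpers differ only in how much of $task.process they keep. A worked example with a hypothetical fully qualified process name, using the same tokenize calls defined in functions.nf above:

    def task_process = 'NFCORE_TEST:TEST_INTERVALLISTTOOLS:GATK4_INTERVALLISTTOOLS'

    // getProcessName(): last colon-separated token, i.e. the full module name
    assert task_process.tokenize(':')[-1] == 'GATK4_INTERVALLISTTOOLS'

    // getSoftwareName(): first underscore-separated token of that, lower-cased
    assert task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() == 'gatk4'
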
b/modules/gatk4/intervallisttools/meta.yml index efe31da1..65adb7b6 100644 --- a/modules/gatk4/intervallisttools/meta.yml +++ b/modules/gatk4/intervallisttools/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - interval_list: type: file description: Interval list files diff --git a/modules/gatk4/markduplicates/functions.nf b/modules/gatk4/markduplicates/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/markduplicates/functions.nf +++ b/modules/gatk4/markduplicates/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index 6df2a47c..68b17366 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process GATK4_MARKDUPLICATES { output: tuple val(meta), path("*.bam") , emit: bam tuple val(meta), path("*.metrics"), emit: metrics - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process GATK4_MARKDUPLICATES { --OUTPUT ${prefix}.bam \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/markduplicates/meta.yml b/modules/gatk4/markduplicates/meta.yml index abe61e02..58e30910 100644 --- a/modules/gatk4/markduplicates/meta.yml +++ b/modules/gatk4/markduplicates/meta.yml @@ -35,7 +35,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: Marked duplicates BAM file diff --git a/modules/gatk4/mergebamalignment/functions.nf b/modules/gatk4/mergebamalignment/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/mergebamalignment/functions.nf +++ b/modules/gatk4/mergebamalignment/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index b65f4653..269836a7 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process GATK4_MERGEBAMALIGNMENT { output: tuple val(meta), path('*.bam'), emit: bam - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process GATK4_MERGEBAMALIGNMENT { O=${prefix}.bam \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/mergebamalignment/meta.yml b/modules/gatk4/mergebamalignment/meta.yml index 155b6364..e2e7b7ec 100644 --- a/modules/gatk4/mergebamalignment/meta.yml +++ b/modules/gatk4/mergebamalignment/meta.yml @@ -40,6 +40,6 @@ output: - version: type: file description: File containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/mergevcfs/functions.nf b/modules/gatk4/mergevcfs/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/mergevcfs/functions.nf +++ b/modules/gatk4/mergevcfs/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function 
to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index 5a80c9ff..d47aa68f 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process GATK4_MERGEVCFS { output: tuple val(meta), path('*.vcf.gz'), emit: vcf - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -44,6 +44,9 @@ process GATK4_MERGEVCFS { $ref \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/mergevcfs/meta.yml b/modules/gatk4/mergevcfs/meta.yml index 14b28fa0..d2679ab8 100644 --- a/modules/gatk4/mergevcfs/meta.yml +++ b/modules/gatk4/mergevcfs/meta.yml @@ -37,6 +37,6 @@ output: - version: type: file description: File 
containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/mutect2/functions.nf b/modules/gatk4/mutect2/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/mutect2/functions.nf +++ b/modules/gatk4/mutect2/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 6ab9e1c7..03bcc2d1 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -35,7 +35,7 @@ process GATK4_MUTECT2 { tuple val(meta), path("*.tbi") , emit: tbi tuple val(meta), path("*.stats") , emit: stats tuple val(meta), path("*.f1r2.tar.gz"), optional:true, emit: f1r2 - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -72,6 +72,9 @@ process GATK4_MUTECT2 { -O ${prefix}.vcf.gz \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 7833d694..75b38153 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -87,7 +87,7 @@ output: - version: type: file description: File containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@GCJMackenzie" diff --git a/modules/gatk4/revertsam/functions.nf b/modules/gatk4/revertsam/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/revertsam/functions.nf +++ b/modules/gatk4/revertsam/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 2f4959db..e691d3f9 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process GATK4_REVERTSAM { output: tuple val(meta), path('*.bam'), emit: bam - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process GATK4_REVERTSAM { O=${prefix}.reverted.bam \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/revertsam/meta.yml b/modules/gatk4/revertsam/meta.yml index 258bb253..d6a1d7fa 100644 --- a/modules/gatk4/revertsam/meta.yml +++ b/modules/gatk4/revertsam/meta.yml @@ -30,6 +30,6 @@ output: - version: type: file description: File containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/samtofastq/functions.nf b/modules/gatk4/samtofastq/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/samtofastq/functions.nf +++ b/modules/gatk4/samtofastq/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // 
Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 486d6b5d..edf895bb 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process GATK4_SAMTOFASTQ { output: tuple val(meta), path('*.fastq.gz'), emit: fastq - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process GATK4_SAMTOFASTQ { $output \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/samtofastq/meta.yml b/modules/gatk4/samtofastq/meta.yml index 0601f3a7..956d2186 100644 --- a/modules/gatk4/samtofastq/meta.yml +++ b/modules/gatk4/samtofastq/meta.yml @@ -30,6 +30,6 @@ output: - version: type: file description: File containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git 
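Note (not part of the diff itself): the two helpers now shipped in every functions.nf split the fully qualified Nextflow process name, and that split is what feeds the versions.yml keys, the last colon-separated tag becomes the top-level key and its first underscore-delimited token, lower-cased, names the tool. A minimal Groovy sketch with a made-up process name:

    def task_process = 'NFCORE_SAREK:SAREK:GATK4_MUTECT2'   // hypothetical fully qualified name
    assert task_process.tokenize(':')[-1] == 'GATK4_MUTECT2'                          // getProcessName
    assert task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() == 'gatk4'   // getSoftwareName
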
a/modules/gatk4/splitncigarreads/functions.nf b/modules/gatk4/splitncigarreads/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/splitncigarreads/functions.nf +++ b/modules/gatk4/splitncigarreads/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index a8724e2e..11d6c9a5 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process GATK4_SPLITNCIGARREADS { output: tuple val(meta), path('*.bam'), emit: bam - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process GATK4_SPLITNCIGARREADS { -O ${prefix}.bam \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/splitncigarreads/meta.yml b/modules/gatk4/splitncigarreads/meta.yml index e433cbf6..c4266874 100644 --- a/modules/gatk4/splitncigarreads/meta.yml +++ b/modules/gatk4/splitncigarreads/meta.yml @@ -35,6 +35,6 @@ output: - version: type: file description: File containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/variantfiltration/functions.nf b/modules/gatk4/variantfiltration/functions.nf index da9da093..85628ee0 100644 --- a/modules/gatk4/variantfiltration/functions.nf +++ b/modules/gatk4/variantfiltration/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index 82593d18..90b6ef25 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process GATK4_VARIANTFILTRATION { output: tuple val(meta), path("*.vcf"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: @@ -39,6 +39,9 @@ process GATK4_VARIANTFILTRATION { -O ${prefix}.vcf \\ $options.args - echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/gatk4/variantfiltration/meta.yml b/modules/gatk4/variantfiltration/meta.yml index d7f72582..6b0a9026 100644 --- a/modules/gatk4/variantfiltration/meta.yml +++ b/modules/gatk4/variantfiltration/meta.yml @@ -42,6 +42,6 @@ output: - version: type: file description: File containing software version - pattern: "*.version.txt" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/genmap/index/functions.nf b/modules/genmap/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/genmap/index/functions.nf +++ b/modules/genmap/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/genmap/index/main.nf b/modules/genmap/index/main.nf index 8166315e..f1168d4e 100644 --- a/modules/genmap/index/main.nf +++ b/modules/genmap/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process GENMAP_INDEX { output: path "genmap" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process GENMAP_INDEX { -F $fasta \\ -I genmap - echo \$(genmap --version 2>&1) | sed 's/GenMap version: //; s/SeqAn.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') + END_VERSIONS """ } diff --git a/modules/genmap/index/meta.yml b/modules/genmap/index/meta.yml index 332c3ba7..cd299da2 100644 --- a/modules/genmap/index/meta.yml +++ b/modules/genmap/index/meta.yml @@ -21,7 +21,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - index: type: index description: Genmap index file diff --git 
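Note (not part of the diff itself): the reworked saveFiles drops the old blanket '.version.txt' filter and instead special-cases versions.yml, returning null unless the NF_CORE_MODULES_TEST environment variable is set; returning null from a publishDir saveAs closure makes Nextflow skip publishing that file, so versions.yml stays in the task work directory during normal runs. A minimal sketch of that behaviour (argument values invented):

    // With NF_CORE_MODULES_TEST unset, the guard returns null and versions.yml is not published.
    def publish = saveFiles(filename: 'versions.yml', options: [:],
                            publish_dir: 'gatk4', meta: [id: 'sample1'], publish_by_meta: ['id'])
    assert publish == null   // file remains only in the work directory
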
a/modules/genmap/mappability/functions.nf b/modules/genmap/mappability/functions.nf index da9da093..85628ee0 100644 --- a/modules/genmap/mappability/functions.nf +++ b/modules/genmap/mappability/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/genmap/mappability/main.nf b/modules/genmap/mappability/main.nf index 8587c950..9eeb4253 100644 --- a/modules/genmap/mappability/main.nf +++ b/modules/genmap/mappability/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process GENMAP_MAPPABILITY { path "*.wig" , optional:true, emit: wig path "*.bedgraph" , optional:true, emit: bedgraph path "*.txt" , optional:true, emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process GENMAP_MAPPABILITY { -I $index \\ -O mappability - echo \$(genmap --version 2>&1) | sed 's/GenMap version: //; s/SeqAn.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') + END_VERSIONS """ } diff --git a/modules/genmap/mappability/meta.yml b/modules/genmap/mappability/meta.yml index 851119c9..90807077 100644 --- a/modules/genmap/mappability/meta.yml +++ b/modules/genmap/mappability/meta.yml @@ -24,7 +24,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - wig: type: file description: genmap wig mappability file diff --git a/modules/gffread/functions.nf b/modules/gffread/functions.nf index da9da093..85628ee0 100644 --- a/modules/gffread/functions.nf +++ b/modules/gffread/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gffread/main.nf b/modules/gffread/main.nf index 6b0dd666..1622e98d 100644 --- a/modules/gffread/main.nf +++ b/modules/gffread/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process GFFREAD { output: path "*.gtf" , emit: gtf - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process GFFREAD { $gff \\ $options.args \\ -o ${prefix}.gtf - echo \$(gffread --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gffread --version 2>&1) + END_VERSIONS """ } diff --git a/modules/gffread/meta.yml b/modules/gffread/meta.yml index af2abb6e..1cb7fc91 100644 --- a/modules/gffread/meta.yml +++ b/modules/gffread/meta.yml @@ -27,7 +27,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@emiller88" diff --git a/modules/glnexus/functions.nf b/modules/glnexus/functions.nf index da9da093..85628ee0 100644 --- a/modules/glnexus/functions.nf +++ b/modules/glnexus/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if 
(!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index dadb9d60..60f50932 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process GLNEXUS { output: tuple val(meta), path("*.bcf"), emit: bcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -44,6 +44,9 @@ process GLNEXUS { $options.args \\ ${input.join(' ')} \\ > ${prefix}.bcf - echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(glnexus_cli 2>&1 | head -n 1 | sed 's/^.*release //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/glnexus/meta.yml b/modules/glnexus/meta.yml index f64a812e..fd1a407d 100644 --- a/modules/glnexus/meta.yml +++ b/modules/glnexus/meta.yml @@ -27,7 +27,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bcf: type: file description: merged BCF file diff --git a/modules/graphmap2/align/functions.nf b/modules/graphmap2/align/functions.nf index da9da093..85628ee0 100644 --- 
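Note (not part of the diff itself): the END_VERSIONS heredoc mixes two interpolation layers. ${getProcessName(task.process)} and ${getSoftwareName(task.process)} are expanded by Groovy when the script string is built, while the escaped \$( ... ) command substitution is left intact for bash to run inside the container. A rough Groovy-only sketch of the same templating (process/software names and the version shown are illustrative only):

    def processName  = 'GLNEXUS'
    def softwareName = 'glnexus'
    def script = """
    cat <<-END_VERSIONS > versions.yml
    ${processName}:
        ${softwareName}: \$(glnexus_cli 2>&1 | head -n 1 | sed 's/^.*release //; s/ .*\$//')
    END_VERSIONS
    """
    // bash then writes, for example:
    //   GLNEXUS:
    //       glnexus: 1.3.1      (version value is only an example)
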
a/modules/graphmap2/align/functions.nf +++ b/modules/graphmap2/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/graphmap2/align/main.nf b/modules/graphmap2/align/main.nf index 0ee4c02b..cf598b3d 100644 --- a/modules/graphmap2/align/main.nf +++ b/modules/graphmap2/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process GRAPHMAP2_ALIGN { output: tuple val(meta), path("*.sam"), emit: sam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,6 +41,9 @@ process GRAPHMAP2_ALIGN { -o ${prefix}.sam \\ $options.args - echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(graphmap2 align 2>&1 | sed 's/^.*Version: v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/graphmap2/align/meta.yml b/modules/graphmap2/align/meta.yml index da773ed2..a5b3cd6c 100644 --- a/modules/graphmap2/align/meta.yml +++ b/modules/graphmap2/align/meta.yml @@ -44,7 +44,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@yuukiiwa" - "@drpatelh" diff --git a/modules/graphmap2/index/functions.nf b/modules/graphmap2/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/graphmap2/index/functions.nf +++ b/modules/graphmap2/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/graphmap2/index/main.nf b/modules/graphmap2/index/main.nf index 17811d4c..906aa6ec 100644 --- a/modules/graphmap2/index/main.nf +++ b/modules/graphmap2/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -22,7 +22,7 @@ process GRAPHMAP2_INDEX { output: path "*.gmidx" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process GRAPHMAP2_INDEX { $options.args \\ -r $fasta - echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(graphmap2 align 2>&1 | sed 's/^.*Version: v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/graphmap2/index/meta.yml b/modules/graphmap2/index/meta.yml index b74f985a..4ff63276 100644 --- a/modules/graphmap2/index/meta.yml +++ b/modules/graphmap2/index/meta.yml @@ -23,7 +23,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@yuukiiwa" - "@drpatelh" diff --git a/modules/gubbins/functions.nf b/modules/gubbins/functions.nf index da9da093..85628ee0 100644 --- a/modules/gubbins/functions.nf +++ b/modules/gubbins/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def 
saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gubbins/main.nf b/modules/gubbins/main.nf index 7f0041c8..10117ae7 100644 --- a/modules/gubbins/main.nf +++ b/modules/gubbins/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -30,7 +30,7 @@ process GUBBINS { path "*.branch_base_reconstruction.embl", emit: embl_branch path "*.final_tree.tre" , emit: tree path "*.node_labelled.final_tree.tre" , emit: tree_labelled - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process GUBBINS { --threads $task.cpus \\ $options.args \\ $alignment - echo \$(run_gubbins.py --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(run_gubbins.py --version 2>&1) + END_VERSIONS """ } diff --git a/modules/gubbins/meta.yml b/modules/gubbins/meta.yml index d1410afb..1a49b335 100644 --- a/modules/gubbins/meta.yml +++ b/modules/gubbins/meta.yml @@ -19,7 +19,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - fasta: type: file description: Filtered variant alignment in fasta format diff --git a/modules/gunzip/functions.nf 
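Note (not part of the diff itself): for anything other than versions.yml, publishing still follows the publish_by_meta / publish_files logic shown in these hunks. A hypothetical trace (values invented; this assumes initOptions keeps the keys it is given and that getPathFromList joins the non-empty components with '/'):

    def publish = saveFiles(
        filename       : 'sample1.vcf.gz',
        options        : [publish_dir: 'gatk4', publish_by_meta: ['id'], publish_files: ['vcf.gz': 'variants']],
        publish_dir    : 'gatk4',
        meta           : [id: 'sample1'],
        publish_by_meta: ['id'])
    // path_list grows ['gatk4'] -> ['gatk4', 'sample1']; 'sample1.vcf.gz' matches the 'vcf.gz' key,
    // so 'variants' is appended and publish == 'gatk4/sample1/variants/sample1.vcf.gz'
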
b/modules/gunzip/functions.nf index da9da093..85628ee0 100644 --- a/modules/gunzip/functions.nf +++ b/modules/gunzip/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index 29248796..a53a9858 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process GUNZIP { output: path "$gunzip", emit: gunzip - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) gunzip = archive.toString() - '.gz' """ gunzip -f $options.args $archive - echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(gunzip --version 2>&1 | sed 's/^.*(gzip) //; s/ Copyright.*\$//') + END_VERSIONS """ } diff --git a/modules/gunzip/meta.yml b/modules/gunzip/meta.yml index 922e74e6..60911685 100644 --- a/modules/gunzip/meta.yml +++ b/modules/gunzip/meta.yml @@ -21,7 +21,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/hifiasm/functions.nf b/modules/hifiasm/functions.nf index da9da093..85628ee0 100644 --- a/modules/hifiasm/functions.nf +++ b/modules/hifiasm/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/hifiasm/main.nf b/modules/hifiasm/main.nf index 5d005ee4..2597afa9 100644 --- a/modules/hifiasm/main.nf +++ b/modules/hifiasm/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -34,7 +34,7 @@ process HIFIASM { tuple val(meta), path("*.asm.a_ctg.gfa") , emit: alternate_contigs, optional: true tuple val(meta), path("*.hap1.p_ctg.gfa") , emit: paternal_contigs , optional: true tuple val(meta), path("*.hap2.p_ctg.gfa") , emit: maternal_contigs , optional: true - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -49,7 +49,10 @@ process HIFIASM { -2 $maternal_kmer_dump \\ $reads - echo \$(hifiasm --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(hifiasm --version 2>&1) + END_VERSIONS """ } else { // Phasing with Hi-C data is not supported yet """ @@ -59,7 +62,10 @@ process HIFIASM { -t $task.cpus \\ $reads - echo \$(hifiasm --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(hifiasm --version 2>&1) + END_VERSIONS """ } } diff --git a/modules/hifiasm/meta.yml b/modules/hifiasm/meta.yml index dc414b93..c6d5a735 100644 --- a/modules/hifiasm/meta.yml +++ b/modules/hifiasm/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - raw_unitigs: type: file description: Raw unitigs diff --git a/modules/hisat2/align/functions.nf b/modules/hisat2/align/functions.nf index da9da093..85628ee0 100644 --- a/modules/hisat2/align/functions.nf +++ b/modules/hisat2/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { 
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 17547c91..21eb3c7d 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process HISAT2_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam tuple val(meta), path("*.log"), emit: summary - path "*.version.txt" , emit: version + path "versions.yml" , emit: version tuple val(meta), path("*fastq.gz"), optional:true, emit: fastq @@ -59,7 +59,10 @@ process HISAT2_ALIGN { $options.args \\ | samtools view -bS -F 4 -F 256 - > ${prefix}.bam - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } else { def unaligned = 
params.save_unaligned ? "--un-conc-gz ${prefix}.unmapped.fastq.gz" : '' @@ -87,7 +90,10 @@ process HISAT2_ALIGN { mv ${prefix}.unmapped.fastq.2.gz ${prefix}.unmapped_2.fastq.gz fi - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } } diff --git a/modules/hisat2/align/meta.yml b/modules/hisat2/align/meta.yml index bf5570fd..799f1808 100644 --- a/modules/hisat2/align/meta.yml +++ b/modules/hisat2/align/meta.yml @@ -51,7 +51,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@ntoda03" diff --git a/modules/hisat2/build/functions.nf b/modules/hisat2/build/functions.nf index da9da093..85628ee0 100644 --- a/modules/hisat2/build/functions.nf +++ b/modules/hisat2/build/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/hisat2/build/main.nf b/modules/hisat2/build/main.nf index 3e74b1d4..ae24a6aa 100644 --- a/modules/hisat2/build/main.nf +++ b/modules/hisat2/build/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process HISAT2_BUILD { output: path "hisat2" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def avail_mem = 0 @@ -65,6 +65,9 @@ process HISAT2_BUILD { $fasta \\ hisat2/${fasta.baseName} - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/hisat2/build/meta.yml b/modules/hisat2/build/meta.yml index 2f34d9d8..1d3fc7e6 100644 --- a/modules/hisat2/build/meta.yml +++ b/modules/hisat2/build/meta.yml @@ -32,7 +32,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - index: type: file description: HISAT2 genome index file diff --git a/modules/hisat2/extractsplicesites/functions.nf b/modules/hisat2/extractsplicesites/functions.nf index da9da093..85628ee0 100644 --- a/modules/hisat2/extractsplicesites/functions.nf +++ b/modules/hisat2/extractsplicesites/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
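Note on the hisat2 hunks (my reading, not stated in the patch): the version line uses \$(echo $VERSION) rather than querying the tool, so the value written to versions.yml comes from a Groovy constant defined elsewhere in each main.nf, which is not visible in these hunks. Assumed shape, for illustration only:

    def VERSION = '2.2.0'   // hypothetical value; the real constant lives higher up in main.nf
    // Groovy interpolates $VERSION before bash runs, so the YAML line becomes, e.g.:
    //   hisat2: 2.2.0
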
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/hisat2/extractsplicesites/main.nf b/modules/hisat2/extractsplicesites/main.nf index d97fdb89..3387cbd1 100644 --- a/modules/hisat2/extractsplicesites/main.nf +++ b/modules/hisat2/extractsplicesites/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,12 +25,15 @@ process HISAT2_EXTRACTSPLICESITES { output: path "*.splice_sites.txt", emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ hisat2_extract_splice_sites.py $gtf > ${gtf.baseName}.splice_sites.txt - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/hisat2/extractsplicesites/meta.yml b/modules/hisat2/extractsplicesites/meta.yml index 228138a8..3befc4dd 100644 --- a/modules/hisat2/extractsplicesites/meta.yml +++ b/modules/hisat2/extractsplicesites/meta.yml @@ -24,7 +24,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - splicesites: type: file description: Splices sites in gtf file diff --git a/modules/hmmer/hmmalign/functions.nf b/modules/hmmer/hmmalign/functions.nf index da9da093..85628ee0 100644 --- a/modules/hmmer/hmmalign/functions.nf +++ b/modules/hmmer/hmmalign/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/hmmer/hmmalign/main.nf b/modules/hmmer/hmmalign/main.nf index 0446707b..a4166fcb 100644 --- a/modules/hmmer/hmmalign/main.nf +++ b/modules/hmmer/hmmalign/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process HMMER_HMMALIGN { output: tuple val(meta), path("*.sthlm.gz"), emit: sthlm - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process HMMER_HMMALIGN { $hmm \\ - | gzip -c > ${meta.id}.sthlm.gz - echo \$(hmmalign -h | grep -o '^# HMMER [0-9.]*') | sed 's/^# HMMER *//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(hmmalign -h | grep -o '^# HMMER [0-9.]*' | sed 's/^# HMMER *//') + END_VERSIONS """ } diff --git a/modules/hmmer/hmmalign/meta.yml b/modules/hmmer/hmmalign/meta.yml index 563d227d..60020b32 100644 --- a/modules/hmmer/hmmalign/meta.yml +++ b/modules/hmmer/hmmalign/meta.yml @@ -35,7 +35,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - sthlm: type: 
file description: Multiple alignment in gzipped Stockholm format diff --git a/modules/homer/annotatepeaks/functions.nf b/modules/homer/annotatepeaks/functions.nf index da9da093..85628ee0 100644 --- a/modules/homer/annotatepeaks/functions.nf +++ b/modules/homer/annotatepeaks/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/homer/annotatepeaks/main.nf b/modules/homer/annotatepeaks/main.nf index 22dbb955..198ae1fe 100644 --- a/modules/homer/annotatepeaks/main.nf +++ b/modules/homer/annotatepeaks/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process HOMER_ANNOTATEPEAKS { output: tuple val(meta), path("*annotatePeaks.txt"), emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,6 +41,9 @@ process HOMER_ANNOTATEPEAKS { -cpu $task.cpus \\ > ${prefix}.annotatePeaks.txt - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/homer/annotatepeaks/meta.yml b/modules/homer/annotatepeaks/meta.yml index a22b9618..f311741b 100644 --- a/modules/homer/annotatepeaks/meta.yml +++ b/modules/homer/annotatepeaks/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/homer/findpeaks/functions.nf b/modules/homer/findpeaks/functions.nf index da9da093..85628ee0 100644 --- a/modules/homer/findpeaks/functions.nf +++ b/modules/homer/findpeaks/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf index 5fcacc1d..fe8399a1 100644 --- a/modules/homer/findpeaks/main.nf +++ b/modules/homer/findpeaks/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] def options = initOptions(params.options) @@ -25,7 +25,7 @@ process HOMER_FINDPEAKS { output: tuple val(meta), path("*peaks.txt"), emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process HOMER_FINDPEAKS { $options.args \\ -o ${prefix}.peaks.txt - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/homer/findpeaks/meta.yml b/modules/homer/findpeaks/meta.yml index d19199d7..51932688 100644 --- a/modules/homer/findpeaks/meta.yml +++ b/modules/homer/findpeaks/meta.yml @@ -32,6 +32,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@EMiller88" diff --git a/modules/homer/maketagdirectory/functions.nf b/modules/homer/maketagdirectory/functions.nf index da9da093..85628ee0 100644 --- a/modules/homer/maketagdirectory/functions.nf +++ b/modules/homer/maketagdirectory/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map 
args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index e0358bc1..daf0ce60 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] def options = initOptions(params.options) @@ -26,7 +26,7 @@ process HOMER_MAKETAGDIRECTORY { output: tuple val(meta), path("tag_dir"), emit: tagdir - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,6 +38,9 @@ process HOMER_MAKETAGDIRECTORY { $bed \\ -genome $fasta - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/homer/maketagdirectory/meta.yml b/modules/homer/maketagdirectory/meta.yml index 78dee297..7a35857b 100644 --- a/modules/homer/maketagdirectory/meta.yml +++ b/modules/homer/maketagdirectory/meta.yml @@ -36,6 +36,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@EMiller88" diff --git a/modules/homer/makeucscfile/functions.nf b/modules/homer/makeucscfile/functions.nf index da9da093..85628ee0 100644 --- 
a/modules/homer/makeucscfile/functions.nf +++ b/modules/homer/makeucscfile/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf index 876d834f..5b23e243 100644 --- a/modules/homer/makeucscfile/main.nf +++ b/modules/homer/makeucscfile/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] def options = initOptions(params.options) @@ -25,7 +25,7 @@ process HOMER_MAKEUCSCFILE { output: tuple val(meta), path("tag_dir/*ucsc.bedGraph.gz"), emit: bedGraph - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process HOMER_MAKEUCSCFILE { -o auto $options.args - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/homer/makeucscfile/meta.yml b/modules/homer/makeucscfile/meta.yml index 891cb295..e63e979a 100644 --- a/modules/homer/makeucscfile/meta.yml +++ b/modules/homer/makeucscfile/meta.yml @@ -33,6 +33,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@EMiller88" diff --git a/modules/iqtree/functions.nf b/modules/iqtree/functions.nf index da9da093..85628ee0 100644 --- a/modules/iqtree/functions.nf +++ b/modules/iqtree/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/iqtree/main.nf b/modules/iqtree/main.nf index 3bd0f3b1..28e07207 100644 --- a/modules/iqtree/main.nf +++ b/modules/iqtree/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process IQTREE { output: path "*.treefile", emit: phylogeny - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process IQTREE { -ntmax $task.cpus \\ -mem $memory \\ - echo \$(iqtree -version 2>&1) | sed 's/^IQ-TREE multicore version \\([0-9\\.]*\\) .*\$/\\1/' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(iqtree -version 2>&1 | sed 's/^IQ-TREE multicore version \\([0-9\\.]*\\) .*\$/\\1/') + END_VERSIONS """ } diff --git a/modules/iqtree/meta.yml b/modules/iqtree/meta.yml index 19f81b15..426ad0cf 100644 --- a/modules/iqtree/meta.yml +++ b/modules/iqtree/meta.yml @@ -23,7 +23,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - phylogeny: type: file description: A phylogeny in Newick format diff --git a/modules/ivar/consensus/functions.nf b/modules/ivar/consensus/functions.nf index da9da093..85628ee0 100644 --- a/modules/ivar/consensus/functions.nf +++ b/modules/ivar/consensus/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to 
save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ivar/consensus/main.nf b/modules/ivar/consensus/main.nf index 1b1019cf..7c4a5b57 100644 --- a/modules/ivar/consensus/main.nf +++ b/modules/ivar/consensus/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process IVAR_CONSENSUS { tuple val(meta), path("*.fa") , emit: fasta tuple val(meta), path("*.qual.txt"), emit: qual tuple val(meta), path("*.mpileup") , optional:true, emit: mpileup - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process IVAR_CONSENSUS { $options.args \\ -p $prefix - echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(ivar version 2>&1 | sed 's/^.*iVar version //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/ivar/consensus/meta.yml b/modules/ivar/consensus/meta.yml index 913a7660..2a95c51c 100644 --- a/modules/ivar/consensus/meta.yml +++ b/modules/ivar/consensus/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: 
"versions.yml" authors: - "@andersgs" - "@drpatelh" diff --git a/modules/ivar/trim/functions.nf b/modules/ivar/trim/functions.nf index da9da093..85628ee0 100644 --- a/modules/ivar/trim/functions.nf +++ b/modules/ivar/trim/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ivar/trim/main.nf b/modules/ivar/trim/main.nf index afdc99e4..e9b1e23b 100644 --- a/modules/ivar/trim/main.nf +++ b/modules/ivar/trim/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process IVAR_TRIM { output: tuple val(meta), path("*.bam"), emit: bam tuple val(meta), path('*.log'), emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,6 +38,9 @@ process IVAR_TRIM { -p $prefix \\ > ${prefix}.ivar.log - echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(ivar version 2>&1 | sed 's/^.*iVar version //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/ivar/trim/meta.yml b/modules/ivar/trim/meta.yml index 5791db66..762a9fe9 100644 --- a/modules/ivar/trim/meta.yml +++ b/modules/ivar/trim/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@andersgs" - "@drpatelh" diff --git a/modules/ivar/variants/functions.nf b/modules/ivar/variants/functions.nf index da9da093..85628ee0 100644 --- a/modules/ivar/variants/functions.nf +++ b/modules/ivar/variants/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ivar/variants/main.nf b/modules/ivar/variants/main.nf index 154f309c..505d72fb 100644 --- a/modules/ivar/variants/main.nf +++ b/modules/ivar/variants/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process IVAR_VARIANTS { output: tuple val(meta), path("*.tsv") , emit: tsv tuple val(meta), path("*.mpileup"), optional:true, emit: mpileup - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -45,6 +45,9 @@ process IVAR_VARIANTS { -r $fasta \\ -p $prefix - echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(ivar version 2>&1 | sed 's/^.*iVar version //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/ivar/variants/meta.yml b/modules/ivar/variants/meta.yml index 7a5fbbc0..37eb9133 100644 --- a/modules/ivar/variants/meta.yml +++ b/modules/ivar/variants/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@andersgs" - "@drpatelh" diff --git a/modules/kallisto/index/functions.nf b/modules/kallisto/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/kallisto/index/functions.nf +++ b/modules/kallisto/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ 
def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/kallisto/index/main.nf b/modules/kallisto/index/main.nf index 85eb7f0d..00ae9601 100644 --- a/modules/kallisto/index/main.nf +++ b/modules/kallisto/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process KALLISTO_INDEX { output: path "kallisto" , emit: idx - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process KALLISTO_INDEX { -i kallisto \\ $fasta - echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(kallisto 2>&1 | sed 's/^kallisto //; s/Usage.*\$//') + END_VERSIONS """ } diff --git a/modules/kallisto/index/meta.yml b/modules/kallisto/index/meta.yml index 24b44b0b..ba4855b0 100644 --- a/modules/kallisto/index/meta.yml +++ b/modules/kallisto/index/meta.yml @@ -21,7 +21,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - idx: type: index description: Kallisto genome index diff --git 
a/modules/kallistobustools/count/functions.nf b/modules/kallistobustools/count/functions.nf index da9da093..85628ee0 100644 --- a/modules/kallistobustools/count/functions.nf +++ b/modules/kallistobustools/count/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index cb561a9d..309bd57c 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -29,7 +29,7 @@ process KALLISTOBUSTOOLS_COUNT { output: tuple val(meta), path ("*.count"), emit: count - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -51,6 +51,9 @@ process KALLISTOBUSTOOLS_COUNT { ${reads[0]} \\ ${reads[1]} - echo \$(kb 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(kb 2>&1 | sed 's/^.*kb_python //;s/positional arguments.*\$//') + END_VERSIONS """ } diff --git a/modules/kallistobustools/count/meta.yml b/modules/kallistobustools/count/meta.yml index 688dfdef..41cf91a0 100644 --- a/modules/kallistobustools/count/meta.yml +++ b/modules/kallistobustools/count/meta.yml @@ -61,7 +61,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@flowuenne" diff --git a/modules/kallistobustools/ref/functions.nf b/modules/kallistobustools/ref/functions.nf index da9da093..85628ee0 100644 --- a/modules/kallistobustools/ref/functions.nf +++ b/modules/kallistobustools/ref/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/kallistobustools/ref/main.nf b/modules/kallistobustools/ref/main.nf index 93935696..bc9b32f5 100644 --- a/modules/kallistobustools/ref/main.nf +++ b/modules/kallistobustools/ref/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process KALLISTOBUSTOOLS_REF { val workflow output: - path "*.version.txt" , emit: version + path "versions.yml" , emit: version path "kb_ref_out.idx" , emit: index path "t2g.txt" , emit: t2g path "cdna.fa" , emit: cdna @@ -45,7 +45,10 @@ process KALLISTOBUSTOOLS_REF { $fasta \\ $gtf - echo \$(kb 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(kb 2>&1 | sed 's/^.*kb_python //;s/positional arguments.*\$//') + END_VERSIONS """ } else { """ @@ -61,7 +64,10 @@ process KALLISTOBUSTOOLS_REF { $fasta \\ $gtf - echo \$(kb 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(kb 2>&1 | sed 's/^.*kb_python //;s/positional arguments.*\$//') + END_VERSIONS """ } } diff --git a/modules/kallistobustools/ref/meta.yml b/modules/kallistobustools/ref/meta.yml index c2a85b37..b9f50f20 100644 --- a/modules/kallistobustools/ref/meta.yml +++ b/modules/kallistobustools/ref/meta.yml @@ -30,7 +30,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - kb_ref_idx: type: file description: Index file from kb ref. 
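The hunks above and below all apply the same three-part change: functions.nf gains a getProcessName() helper plus a saveFiles() that only publishes versions.yml when NF_CORE_MODULES_TEST is set, main.nf switches its version output from *.version.txt to a versions.yml heredoc, and meta.yml updates the output pattern to match. For reference, here is a minimal sketch of what a module's main.nf looks like after this migration. It is not part of the patch; the process name EXAMPLE_TOOL, the example_tool command, the *.out output and the hard-coded VERSION are hypothetical placeholders, while the include line, output declaration and END_VERSIONS heredoc follow the pattern shown in the diffs.

// Import generic module functions (the functions.nf shown in the hunks above)
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process EXAMPLE_TOOL {
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) }

    input:
    path input_file

    output:
    path "*.out"        , emit: out
    path "versions.yml" , emit: version

    script:
    def VERSION = '1.0.0' // placeholder: real modules query the tool or hard-code the pinned release
    """
    example_tool $input_file > ${input_file.baseName}.out

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$(echo $VERSION)
    END_VERSIONS
    """
}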
diff --git a/modules/kleborate/functions.nf b/modules/kleborate/functions.nf index da9da093..85628ee0 100644 --- a/modules/kleborate/functions.nf +++ b/modules/kleborate/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf index ef7eab23..0079071f 100644 --- a/modules/kleborate/main.nf +++ b/modules/kleborate/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process KLEBORATE { output: tuple val(meta), path("*.txt"), emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process KLEBORATE { --outfile ${prefix}.results.txt \\ --assemblies *.fasta - echo \$(kleborate -v 2>&1) | sed 's/kleborate //;' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(kleborate -v 2>&1 | sed 's/kleborate //;') + END_VERSIONS """ } diff --git a/modules/kleborate/meta.yml b/modules/kleborate/meta.yml index 19643033..0394a626 100644 --- a/modules/kleborate/meta.yml +++ b/modules/kleborate/meta.yml @@ -32,7 +32,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - txt: type: file description: Result file generated after screening diff --git a/modules/kraken2/kraken2/functions.nf b/modules/kraken2/kraken2/functions.nf index da9da093..85628ee0 100644 --- a/modules/kraken2/kraken2/functions.nf +++ b/modules/kraken2/kraken2/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index ea0b72fd..9a01389a 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process KRAKEN2_KRAKEN2 { tuple val(meta), path('*classified*') , emit: classified tuple val(meta), path('*unclassified*'), emit: unclassified tuple val(meta), path('*report.txt') , emit: txt - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -48,6 +48,9 @@ process KRAKEN2_KRAKEN2 { pigz -p $task.cpus *.fastq - echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(kraken2 --version 2>&1 | sed 's/^.*Kraken version //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/kraken2/kraken2/meta.yml b/modules/kraken2/kraken2/meta.yml index cb1ec0de..3996fbc0 100644 --- a/modules/kraken2/kraken2/meta.yml +++ b/modules/kraken2/kraken2/meta.yml @@ -53,7 +53,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/last/dotplot/functions.nf b/modules/last/dotplot/functions.nf index da9da093..85628ee0 100644 --- a/modules/last/dotplot/functions.nf +++ b/modules/last/dotplot/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/last/dotplot/main.nf b/modules/last/dotplot/main.nf index 3644a18e..ca30bbff 100644 --- a/modules/last/dotplot/main.nf +++ b/modules/last/dotplot/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process LAST_DOTPLOT { output: tuple val(meta), path("*.gif"), optional:true, emit: gif tuple val(meta), path("*.png"), optional:true, emit: png - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process LAST_DOTPLOT { $prefix.$format # last-dotplot has no --version option so let's use lastal from the same suite - lastal --version | sed 's/lastal //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lastal --version | sed 's/lastal //') + END_VERSIONS """ } diff --git a/modules/last/dotplot/meta.yml b/modules/last/dotplot/meta.yml index ab57dab7..fa092b4c 100644 --- a/modules/last/dotplot/meta.yml +++ b/modules/last/dotplot/meta.yml @@ -41,7 +41,7 @@ output: - version: 
type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@charles-plessy" diff --git a/modules/last/lastal/functions.nf b/modules/last/lastal/functions.nf index da9da093..85628ee0 100644 --- a/modules/last/lastal/functions.nf +++ b/modules/last/lastal/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/last/lastal/main.nf b/modules/last/lastal/main.nf index e42653cc..3d6518a4 100644 --- a/modules/last/lastal/main.nf +++ b/modules/last/lastal/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process LAST_LASTAL { output: tuple val(meta), path("*.maf.gz"), emit: maf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process LAST_LASTAL { # gzip needs --no-name otherwise it puts a timestamp in the file, # which makes its checksum non-reproducible. - echo \$(lastal --version 2>&1) | sed 's/lastal //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lastal --version 2>&1 | sed 's/lastal //') + END_VERSIONS """ } diff --git a/modules/last/lastal/meta.yml b/modules/last/lastal/meta.yml index 2237c75a..1f8fde9c 100644 --- a/modules/last/lastal/meta.yml +++ b/modules/last/lastal/meta.yml @@ -42,7 +42,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - maf: type: file description: Gzipped MAF (Multiple Alignment Format) file diff --git a/modules/last/lastdb/functions.nf b/modules/last/lastdb/functions.nf index da9da093..85628ee0 100644 --- a/modules/last/lastdb/functions.nf +++ b/modules/last/lastdb/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/last/lastdb/main.nf b/modules/last/lastdb/main.nf index a8cd4921..ac552e7d 100644 --- a/modules/last/lastdb/main.nf +++ b/modules/last/lastdb/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process LAST_LASTDB { output: tuple val(meta), path("lastdb"), emit: index - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process LAST_LASTDB { lastdb/${prefix} \\ $fastx - echo \$(lastdb --version 2>&1) | sed 's/lastdb //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + END_VERSIONS """ } diff --git a/modules/last/lastdb/meta.yml b/modules/last/lastdb/meta.yml index 64e4d3e3..cddbc29c 100644 --- a/modules/last/lastdb/meta.yml +++ b/modules/last/lastdb/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - index: type: directory description: directory containing the files of the LAST index diff --git a/modules/last/mafconvert/functions.nf b/modules/last/mafconvert/functions.nf index da9da093..85628ee0 100644 --- a/modules/last/mafconvert/functions.nf +++ b/modules/last/mafconvert/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to 
save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/last/mafconvert/main.nf b/modules/last/mafconvert/main.nf index eea53dd1..e112cbd8 100644 --- a/modules/last/mafconvert/main.nf +++ b/modules/last/mafconvert/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -32,7 +32,7 @@ process LAST_MAFCONVERT { tuple val(meta), path("*.psl.gz"), optional:true, emit: psl_gz tuple val(meta), path("*.sam.gz"), optional:true, emit: sam_gz tuple val(meta), path("*.tab.gz"), optional:true, emit: tab_gz - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process LAST_MAFCONVERT { > ${prefix}.${format}.gz # maf-convert has no --version option but lastdb (part of the same package) has. 
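# The swap just below is the same version-reporting change applied to every module in this
# series: the echo into ${software}.version.txt goes away and the heredoc writes a
# versions.yml keyed by the process name and the lower-cased tool name instead. For this
# module the emitted file looks roughly like this (the version string is illustrative only,
# not taken from this diff):
#
#   LAST_MAFCONVERT:
#       last: 1219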
- echo \$(lastdb --version 2>&1) | sed 's/lastdb //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + END_VERSIONS """ } diff --git a/modules/last/mafconvert/meta.yml b/modules/last/mafconvert/meta.yml index 0ab87457..f0912ccd 100644 --- a/modules/last/mafconvert/meta.yml +++ b/modules/last/mafconvert/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - axt_gz: type: file description: Gzipped pairwise alignment in Axt (Blastz) format (optional) diff --git a/modules/last/mafswap/functions.nf b/modules/last/mafswap/functions.nf index da9da093..85628ee0 100644 --- a/modules/last/mafswap/functions.nf +++ b/modules/last/mafswap/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/last/mafswap/main.nf b/modules/last/mafswap/main.nf index 03292c81..f597693c 100644 --- a/modules/last/mafswap/main.nf +++ b/modules/last/mafswap/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process LAST_MAFSWAP { output: tuple val(meta), path("*.maf.gz"), emit: maf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -32,6 +32,9 @@ process LAST_MAFSWAP { maf-swap $options.args $maf | gzip --no-name > ${prefix}.swapped.maf.gz # maf-swap has no --version option but lastdb, part of the same package, has. - echo \$(lastdb --version 2>&1) | sed 's/lastdb //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + END_VERSIONS """ } diff --git a/modules/last/mafswap/meta.yml b/modules/last/mafswap/meta.yml index eb35a46c..8821ab47 100644 --- a/modules/last/mafswap/meta.yml +++ b/modules/last/mafswap/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@charles-plessy" diff --git a/modules/last/postmask/functions.nf b/modules/last/postmask/functions.nf index da9da093..85628ee0 100644 --- a/modules/last/postmask/functions.nf +++ b/modules/last/postmask/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/last/postmask/main.nf b/modules/last/postmask/main.nf index 677b23f6..d3fa02e3 100644 --- a/modules/last/postmask/main.nf +++ b/modules/last/postmask/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process LAST_POSTMASK { output: tuple val(meta), path("*.maf.gz"), emit: maf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process LAST_POSTMASK { last-postmask $options.args $maf | gzip --no-name > ${prefix}.maf.gz # last-postmask does not have a --version option - echo \$(lastal --version 2>&1) | sed 's/lastal //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lastal --version 2>&1 | sed 's/lastal //') + END_VERSIONS """ } diff --git a/modules/last/postmask/meta.yml b/modules/last/postmask/meta.yml index 45eaa2b9..d3a184eb 100644 --- a/modules/last/postmask/meta.yml +++ b/modules/last/postmask/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@charles-plessy" diff --git a/modules/last/split/functions.nf b/modules/last/split/functions.nf index da9da093..85628ee0 100644 --- a/modules/last/split/functions.nf +++ b/modules/last/split/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/last/split/main.nf b/modules/last/split/main.nf index a6fe1dda..78d59ed4 100644 --- a/modules/last/split/main.nf +++ b/modules/last/split/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process LAST_SPLIT { output: tuple val(meta), path("*.maf.gz"), emit: maf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -31,6 +31,9 @@ process LAST_SPLIT { """ zcat < $maf | last-split $options.args | gzip --no-name > ${prefix}.maf.gz - echo \$(last-split --version 2>&1) | sed 's/last-split //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(last-split --version 2>&1 | sed 's/last-split //') + END_VERSIONS """ } diff --git a/modules/last/split/meta.yml b/modules/last/split/meta.yml index 73f37784..7b11bcd5 100644 --- a/modules/last/split/meta.yml +++ b/modules/last/split/meta.yml @@ -35,7 +35,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - maf: type: file description: Multiple Aligment 
Format (MAF) file, compressed with gzip diff --git a/modules/last/train/functions.nf b/modules/last/train/functions.nf index da9da093..85628ee0 100644 --- a/modules/last/train/functions.nf +++ b/modules/last/train/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/last/train/main.nf b/modules/last/train/main.nf index cc1fa544..39728ced 100644 --- a/modules/last/train/main.nf +++ b/modules/last/train/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process LAST_TRAIN { output: tuple val(meta), path("*.par"), emit: param_file - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process LAST_TRAIN { $fastx \\ > ${prefix}.\$INDEX_NAME.par - lastdb --version | sed 's/lastdb //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lastdb --version | sed 's/lastdb //') + END_VERSIONS """ } diff --git a/modules/last/train/meta.yml b/modules/last/train/meta.yml index 5796b764..820e4bc8 100644 --- a/modules/last/train/meta.yml +++ b/modules/last/train/meta.yml @@ -38,7 +38,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - param_file: type: file description: Trained parameter file diff --git a/modules/lib/functions.nf b/modules/lib/functions.nf index da9da093..85628ee0 100644 --- a/modules/lib/functions.nf +++ b/modules/lib/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/lofreq/call/functions.nf b/modules/lofreq/call/functions.nf index da9da093..85628ee0 100644 --- a/modules/lofreq/call/functions.nf +++ b/modules/lofreq/call/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/lofreq/call/main.nf b/modules/lofreq/call/main.nf index d342c929..b205f041 100644 --- a/modules/lofreq/call/main.nf +++ b/modules/lofreq/call/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process LOFREQ_CALL { output: tuple val(meta), path("*.vcf.gz"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process LOFREQ_CALL { -o ${prefix}.vcf.gz \\ $bam - echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lofreq version 2>&1 | sed 's/^version: //; s/ *commit.*\$//') + END_VERSIONS """ } diff --git a/modules/lofreq/call/meta.yml b/modules/lofreq/call/meta.yml index f14115d2..16d23cd9 100644 --- a/modules/lofreq/call/meta.yml +++ b/modules/lofreq/call/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - vcf: type: file description: VCF output file diff --git a/modules/lofreq/callparallel/functions.nf b/modules/lofreq/callparallel/functions.nf index da9da093..85628ee0 100644 --- a/modules/lofreq/callparallel/functions.nf +++ b/modules/lofreq/callparallel/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index 4392c700..2bea68f2 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process LOFREQ_CALLPARALLEL { output: tuple val(meta), path("*.vcf.gz"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process LOFREQ_CALLPARALLEL { -o ${prefix}.vcf.gz \\ $bam - echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lofreq version 2>&1 | sed 's/^version: //; s/ *commit.*\$//') + END_VERSIONS """ } diff --git a/modules/lofreq/callparallel/meta.yml b/modules/lofreq/callparallel/meta.yml index 3154f412..15257180 100644 --- a/modules/lofreq/callparallel/meta.yml +++ b/modules/lofreq/callparallel/meta.yml @@ -43,7 +43,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - vcf: type: file description: Predicted variants file diff --git a/modules/lofreq/filter/functions.nf b/modules/lofreq/filter/functions.nf index da9da093..85628ee0 100644 --- a/modules/lofreq/filter/functions.nf +++ b/modules/lofreq/filter/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // 
@@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/lofreq/filter/main.nf b/modules/lofreq/filter/main.nf index a2b53f08..693cef23 100644 --- a/modules/lofreq/filter/main.nf +++ b/modules/lofreq/filter/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process LOFREQ_FILTER { output: tuple val(meta), path("*.gz"), emit: vcf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process LOFREQ_FILTER { -i $vcf \\ -o ${prefix}.vcf.gz - echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lofreq version 2>&1 | sed 's/^version: //; s/ *commit.*\$//') + END_VERSIONS """ } diff --git a/modules/lofreq/filter/meta.yml b/modules/lofreq/filter/meta.yml index 9de18d4d..9aa92da7 100644 --- a/modules/lofreq/filter/meta.yml +++ b/modules/lofreq/filter/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - vcf: type: file 
description: VCF filtered output file diff --git a/modules/lofreq/indelqual/functions.nf b/modules/lofreq/indelqual/functions.nf index da9da093..85628ee0 100644 --- a/modules/lofreq/indelqual/functions.nf +++ b/modules/lofreq/indelqual/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/lofreq/indelqual/main.nf b/modules/lofreq/indelqual/main.nf index befd8d7a..89c79c39 100644 --- a/modules/lofreq/indelqual/main.nf +++ b/modules/lofreq/indelqual/main.nf @@ -1,4 +1,4 @@ -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process LOFREQ_INDELQUAL { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process LOFREQ_INDELQUAL { -o ${prefix}.bam \\ $bam - echo \$(lofreq version 2>&1) | sed 's/^.*lofreq //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(lofreq version 2>&1 | sed 's/^.*lofreq //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/lofreq/indelqual/meta.yml b/modules/lofreq/indelqual/meta.yml index 294cf17d..34f296d7 100644 --- a/modules/lofreq/indelqual/meta.yml +++ b/modules/lofreq/indelqual/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: BAM file with indel qualities inserted into it diff --git a/modules/macs2/callpeak/functions.nf b/modules/macs2/callpeak/functions.nf index da9da093..85628ee0 100644 --- a/modules/macs2/callpeak/functions.nf +++ b/modules/macs2/callpeak/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index 67d686c6..4fcd6b05 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process MACS2_CALLPEAK { output: tuple val(meta), path("*.{narrowPeak,broadPeak}"), emit: peak tuple val(meta), path("*.xls") , emit: xls - path "*.version.txt" , emit: version + path "versions.yml" , emit: version tuple val(meta), path("*.gappedPeak"), optional:true, emit: gapped tuple val(meta), path("*.bed") , optional:true, emit: bed @@ -46,6 +46,9 @@ process MACS2_CALLPEAK { --treatment $ipbam \\ $control - macs2 --version | sed -e "s/macs2 //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(macs2 --version | sed -e "s/macs2 //g") + END_VERSIONS """ } diff --git a/modules/malt/build/functions.nf b/modules/malt/build/functions.nf index da9da093..85628ee0 100644 --- a/modules/malt/build/functions.nf +++ b/modules/malt/build/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof 
List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/malt/build/main.nf b/modules/malt/build/main.nf index a7e3751b..3b494c0c 100644 --- a/modules/malt/build/main.nf +++ b/modules/malt/build/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process MALT_BUILD { output: path "malt_index/" , emit: index - path "*.version.txt" , emit: version + path "versions.yml" , emit: version path "malt-build.log", emit: log script: @@ -51,6 +51,9 @@ process MALT_BUILD { $options.args \\ -mdb ${map_db}/*.db |&tee malt-build.log - malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2 > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2) + END_VERSIONS """ } diff --git a/modules/malt/build/meta.yml b/modules/malt/build/meta.yml index 5ace4d29..f1668b94 100644 --- a/modules/malt/build/meta.yml +++ b/modules/malt/build/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - index: type: directory description: Directory containing MALT database index directory diff --git a/modules/malt/run/functions.nf b/modules/malt/run/functions.nf index da9da093..85628ee0 100644 --- a/modules/malt/run/functions.nf +++ b/modules/malt/run/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf index 8add081c..689dabf4 100644 --- a/modules/malt/run/main.nf +++ b/modules/malt/run/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process MALT_RUN { path "*.rma6" , emit: rma6 path "*.{tab,text,sam}", optional:true, emit: alignments path "*.log" , emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -49,6 +49,9 @@ process MALT_RUN { -m $mode \\ --index $index/ |&tee malt-run.log - echo \$(malt-run --help 2>&1) | grep -o 'version.* ' | cut -f 1 -d ',' | cut -f2 -d ' ' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(malt-run --help 2>&1 | grep -o 'version.* 
' | cut -f 1 -d ',' | cut -f2 -d ' ') + END_VERSIONS """ } diff --git a/modules/malt/run/meta.yml b/modules/malt/run/meta.yml index 30421a48..3ad78622 100644 --- a/modules/malt/run/meta.yml +++ b/modules/malt/run/meta.yml @@ -35,7 +35,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - rma6: type: file description: MEGAN6 RMA6 file diff --git a/modules/maltextract/functions.nf b/modules/maltextract/functions.nf index da9da093..85628ee0 100644 --- a/modules/maltextract/functions.nf +++ b/modules/maltextract/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/maltextract/main.nf b/modules/maltextract/main.nf index d7402cb8..426a9fc3 100644 --- a/modules/maltextract/main.nf +++ b/modules/maltextract/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process MALTEXTRACT { output: path "results" , emit: results - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process MALTEXTRACT { -o results/ \\ $options.args - echo \$(MaltExtract --help | head -n 2 | tail -n 1) | sed 's/MaltExtract version//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(MaltExtract --help | head -n 2 | tail -n 1 | sed 's/MaltExtract version//') + END_VERSIONS """ } diff --git a/modules/maltextract/meta.yml b/modules/maltextract/meta.yml index 3cb20fa2..29271753 100644 --- a/modules/maltextract/meta.yml +++ b/modules/maltextract/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - results: type: directory description: Directory containing MaltExtract text results files diff --git a/modules/mash/sketch/functions.nf b/modules/mash/sketch/functions.nf index da9da093..85628ee0 100644 --- a/modules/mash/sketch/functions.nf +++ b/modules/mash/sketch/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/mash/sketch/main.nf b/modules/mash/sketch/main.nf index 3cc7e199..ed018b1a 100644 --- a/modules/mash/sketch/main.nf +++ b/modules/mash/sketch/main.nf @@ -1,4 +1,4 @@ -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -22,7 +22,7 @@ process MASH_SKETCH { output: tuple val(meta), path("*.msh") , emit: mash tuple val(meta), path("*.mash_stats") , emit: stats - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process MASH_SKETCH { -o ${prefix} \\ -r $reads \\ 2> ${prefix}.mash_stats - echo \$(mash --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(mash --version 2>&1) + END_VERSIONS """ } diff --git a/modules/metaphlan3/functions.nf b/modules/metaphlan3/functions.nf index da9da093..85628ee0 100644 --- a/modules/metaphlan3/functions.nf +++ b/modules/metaphlan3/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index 6c75c2a0..8893c2ab 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process METAPHLAN3 { tuple val(meta), path("*_profile.txt") , emit: profile tuple val(meta), path("*.biom") , emit: biom tuple val(meta), path('*.bowtie2out.txt'), optional:true, emit: bt2out - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -45,6 +45,9 @@ process METAPHLAN3 { --bowtie2db ${metaphlan_db} \\ --biom ${prefix}.biom \\ --output_file ${prefix}_profile.txt - echo \$(metaphlan --version 2>&1) | awk '{print \$3}' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(metaphlan --version 2>&1 | awk '{print \$3}') + END_VERSIONS """ } diff --git a/modules/metaphlan3/meta.yml b/modules/metaphlan3/meta.yml index e0d54d88..d9f9f520 100644 --- a/modules/metaphlan3/meta.yml +++ b/modules/metaphlan3/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - profile: type: file description: Tab-separated output file of the predicted taxon relative abundances diff --git a/modules/methyldackel/extract/functions.nf b/modules/methyldackel/extract/functions.nf index da9da093..85628ee0 100644 --- 
a/modules/methyldackel/extract/functions.nf +++ b/modules/methyldackel/extract/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/methyldackel/extract/main.nf b/modules/methyldackel/extract/main.nf index 4c7da3f4..149f4aa0 100644 --- a/modules/methyldackel/extract/main.nf +++ b/modules/methyldackel/extract/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process METHYLDACKEL_EXTRACT { output: tuple val(meta), path("*.bedGraph"), emit: bedgraph - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process METHYLDACKEL_EXTRACT { $fasta \\ $bam - echo \$(MethylDackel --version 2>&1) | cut -f1 -d" " > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") + END_VERSIONS """ } diff --git a/modules/methyldackel/extract/meta.yml b/modules/methyldackel/extract/meta.yml index 525ddebe..7219bb81 100644 --- a/modules/methyldackel/extract/meta.yml +++ b/modules/methyldackel/extract/meta.yml @@ -52,6 +52,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/methyldackel/mbias/functions.nf b/modules/methyldackel/mbias/functions.nf index da9da093..85628ee0 100644 --- a/modules/methyldackel/mbias/functions.nf +++ b/modules/methyldackel/mbias/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/methyldackel/mbias/main.nf b/modules/methyldackel/mbias/main.nf index 7c18197f..9fa39b82 100644 --- a/modules/methyldackel/mbias/main.nf +++ b/modules/methyldackel/mbias/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process METHYLDACKEL_MBIAS { output: tuple val(meta), path("*.mbias.txt"), emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process METHYLDACKEL_MBIAS { --txt \\ > ${prefix}.mbias.txt - echo \$(MethylDackel --version 2>&1) | cut -f1 -d" " > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") + END_VERSIONS """ } diff --git a/modules/methyldackel/mbias/meta.yml b/modules/methyldackel/mbias/meta.yml index e41d0208..a6f58d09 100644 --- a/modules/methyldackel/mbias/meta.yml +++ b/modules/methyldackel/mbias/meta.yml @@ -53,6 +53,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/minia/functions.nf b/modules/minia/functions.nf index da9da093..85628ee0 100644 --- a/modules/minia/functions.nf +++ b/modules/minia/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def 
saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/minia/main.nf b/modules/minia/main.nf index 9ae79ede..b7aa9272 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process MINIA { tuple val(meta), path('*.contigs.fa'), emit: contigs tuple val(meta), path('*.unitigs.fa'), emit: unitigs tuple val(meta), path('*.h5') , emit: h5 - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,6 +38,9 @@ process MINIA { -in input_files.txt \\ -out $prefix - echo \$(minia --version 2>&1) | sed 's/^.*Minia version //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(minia --version 2>&1 | sed 's/^.*Minia version //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/minia/meta.yml b/modules/minia/meta.yml index d3a76be8..638cc3ad 100644 --- a/modules/minia/meta.yml +++ b/modules/minia/meta.yml @@ -40,7 +40,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/minimap2/align/functions.nf 
b/modules/minimap2/align/functions.nf index da9da093..85628ee0 100644 --- a/modules/minimap2/align/functions.nf +++ b/modules/minimap2/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index ec5f6a07..d0ff9c0f 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process MINIMAP2_ALIGN { output: tuple val(meta), path("*.paf"), emit: paf - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,6 +38,9 @@ process MINIMAP2_ALIGN { $input_reads \\ > ${prefix}.paf - echo \$(minimap2 --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(minimap2 --version 2>&1) + END_VERSIONS """ } diff --git a/modules/minimap2/align/meta.yml b/modules/minimap2/align/meta.yml index 3c741b16..1cb20473 100644 --- a/modules/minimap2/align/meta.yml +++ b/modules/minimap2/align/meta.yml @@ -41,6 +41,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/minimap2/index/functions.nf b/modules/minimap2/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/minimap2/index/functions.nf +++ b/modules/minimap2/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/minimap2/index/main.nf b/modules/minimap2/index/main.nf index e143bd62..cfc40417 100644 --- a/modules/minimap2/index/main.nf +++ b/modules/minimap2/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -22,7 +22,7 @@ process MINIMAP2_INDEX { output: path "*.mmi" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process MINIMAP2_INDEX { $options.args \\ $fasta - echo \$(minimap2 --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(minimap2 --version 2>&1) + END_VERSIONS """ } diff --git a/modules/minimap2/index/meta.yml b/modules/minimap2/index/meta.yml index 065e5c32..c1c43c70 100644 --- a/modules/minimap2/index/meta.yml +++ b/modules/minimap2/index/meta.yml @@ -23,7 +23,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@yuukiiwa" - "@drpatelh" diff --git a/modules/mosdepth/functions.nf b/modules/mosdepth/functions.nf index da9da093..85628ee0 100644 --- a/modules/mosdepth/functions.nf +++ b/modules/mosdepth/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/mosdepth/main.nf b/modules/mosdepth/main.nf index 6beea37a..c21ea2a8 100644 --- a/modules/mosdepth/main.nf +++ b/modules/mosdepth/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -31,7 +31,7 @@ process MOSDEPTH { tuple val(meta), path('*.per-base.bed.gz.csi'), emit: per_base_csi tuple val(meta), path('*.regions.bed.gz') , emit: regions_bed tuple val(meta), path('*.regions.bed.gz.csi') , emit: regions_csi - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,6 +43,9 @@ process MOSDEPTH { $options.args \\ $prefix \\ $bam - echo \$(mosdepth --version 2>&1) | sed 's/^.*mosdepth //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(mosdepth --version 2>&1 | sed 's/^.*mosdepth //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/mosdepth/meta.yml b/modules/mosdepth/meta.yml index d96e474f..4c0be86c 100644 --- a/modules/mosdepth/meta.yml +++ b/modules/mosdepth/meta.yml @@ -70,7 +70,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/msisensor/msi/functions.nf 
b/modules/msisensor/msi/functions.nf index da9da093..85628ee0 100644 --- a/modules/msisensor/msi/functions.nf +++ b/modules/msisensor/msi/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/msisensor/msi/main.nf b/modules/msisensor/msi/main.nf index b4a0eb3c..41f79b3a 100644 --- a/modules/msisensor/msi/main.nf +++ b/modules/msisensor/msi/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process MSISENSOR_MSI { tuple val(meta), path("${prefix}_dis") , emit: output_dis tuple val(meta), path("${prefix}_germline"), emit: output_germline tuple val(meta), path("${prefix}_somatic") , emit: output_somatic - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -40,6 +40,9 @@ process MSISENSOR_MSI { -o $prefix \\ $options.args - echo \$(msisensor 2>&1) | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') + END_VERSIONS """ } diff --git a/modules/msisensor/msi/meta.yml b/modules/msisensor/msi/meta.yml index 214f90e6..c01f74e0 100644 --- a/modules/msisensor/msi/meta.yml +++ b/modules/msisensor/msi/meta.yml @@ -48,7 +48,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - txt: type: file description: MSIsensor MSI final report file diff --git a/modules/msisensor/scan/functions.nf b/modules/msisensor/scan/functions.nf index da9da093..85628ee0 100644 --- a/modules/msisensor/scan/functions.nf +++ b/modules/msisensor/scan/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/msisensor/scan/main.nf b/modules/msisensor/scan/main.nf index 57ebeb28..198657ae 100644 --- a/modules/msisensor/scan/main.nf +++ b/modules/msisensor/scan/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process MSISENSOR_SCAN { output: tuple (val(meta), path("*.tab"), emit: txt) - path ("*.version.txt" , emit: version) + path ("versions.yml" , emit: version) script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process MSISENSOR_SCAN { -o ${prefix}.msisensor_scan.tab \\ $options.args - echo \$(msisensor 2>&1) | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') + END_VERSIONS """ } diff --git a/modules/msisensor/scan/meta.yml b/modules/msisensor/scan/meta.yml index 2e4f8f18..940b53a5 100644 --- a/modules/msisensor/scan/meta.yml +++ b/modules/msisensor/scan/meta.yml @@ -32,7 +32,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - txt: type: file description: MSIsensor scan output file of homopolymers & minisatellites diff --git a/modules/muscle/functions.nf b/modules/muscle/functions.nf index da9da093..85628ee0 100644 --- a/modules/muscle/functions.nf +++ b/modules/muscle/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise 
default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/muscle/main.nf b/modules/muscle/main.nf index 9f4747a4..ef9bf484 100644 --- a/modules/muscle/main.nf +++ b/modules/muscle/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -30,7 +30,7 @@ process MUSCLE { tuple val(meta), path("*.msf") , optional: true, emit: msf tuple val(meta), path("*.tree"), optional: true, emit: tree path "*.log" , emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -55,6 +55,9 @@ process MUSCLE { $html_out \\ $tree_out \\ -loga muscle_msa.log - muscle -version | sed 's/^MUSCLE v//; s/by.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(muscle -version | sed 's/^MUSCLE v//; s/by.*\$//') + END_VERSIONS """ } diff --git a/modules/muscle/meta.yml b/modules/muscle/meta.yml index e0eb5289..845a8284 100644 --- a/modules/muscle/meta.yml +++ b/modules/muscle/meta.yml @@ -51,6 +51,6 @@ output: - version: type: file description: File containing 
MUSCLE software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@MGordon" diff --git a/modules/nanolyse/functions.nf b/modules/nanolyse/functions.nf index da9da093..85628ee0 100644 --- a/modules/nanolyse/functions.nf +++ b/modules/nanolyse/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/nanolyse/main.nf b/modules/nanolyse/main.nf index 0abad6cb..84cf579a 100644 --- a/modules/nanolyse/main.nf +++ b/modules/nanolyse/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process NANOLYSE { output: tuple val(meta), path("*.fastq.gz"), emit: fastq path "*.log" , emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process NANOLYSE { gunzip -c $fastq | NanoLyse -r $fasta | gzip > ${prefix}.fastq.gz mv NanoLyse.log ${prefix}.nanolyse.log - echo \$(NanoLyse --version 2>&1) | sed -e "s/NanoLyse //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(NanoLyse --version 2>&1 | sed -e "s/NanoLyse //g") + END_VERSIONS """ } diff --git a/modules/nanolyse/meta.yml b/modules/nanolyse/meta.yml index aae299da..2411d33d 100644 --- a/modules/nanolyse/meta.yml +++ b/modules/nanolyse/meta.yml @@ -41,6 +41,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@yuukiiwa" diff --git a/modules/nanoplot/functions.nf b/modules/nanoplot/functions.nf index da9da093..85628ee0 100644 --- a/modules/nanoplot/functions.nf +++ b/modules/nanoplot/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/nanoplot/main.nf b/modules/nanoplot/main.nf index f5fffe13..86b300f5 100644 --- a/modules/nanoplot/main.nf +++ b/modules/nanoplot/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process NANOPLOT { tuple val(meta), path("*.png") , emit: png tuple val(meta), path("*.txt") , emit: txt tuple val(meta), path("*.log") , emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process NANOPLOT { $options.args \\ -t $task.cpus \\ $input_file - echo \$(NanoPlot --version 2>&1) | sed 's/^.*NanoPlot //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(NanoPlot --version 2>&1 | sed 's/^.*NanoPlot //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/nanoplot/meta.yml b/modules/nanoplot/meta.yml index f1d94312..cf897eb9 100644 --- a/modules/nanoplot/meta.yml +++ b/modules/nanoplot/meta.yml @@ -52,7 +52,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@yuukiiwa" diff --git a/modules/nextclade/functions.nf b/modules/nextclade/functions.nf index da9da093..85628ee0 100755 --- a/modules/nextclade/functions.nf +++ b/modules/nextclade/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) 
{ // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/nextclade/main.nf b/modules/nextclade/main.nf index 8319f6b1..fabf4520 100755 --- a/modules/nextclade/main.nf +++ b/modules/nextclade/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process NEXTCLADE { tuple val(meta), path("${prefix}.tree.json") , emit: json_tree tuple val(meta), path("${prefix}.tsv") , emit: tsv tuple val(meta), path("${prefix}.clades.tsv"), optional:true, emit: tsv_clades - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,6 +43,9 @@ process NEXTCLADE { --output-tsv-clades-only ${prefix}.clades.tsv \\ --output-tree ${prefix}.tree.json - echo \$(nextclade --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(nextclade --version 2>&1) + END_VERSIONS """ } diff --git a/modules/nextclade/meta.yml b/modules/nextclade/meta.yml index d321e08f..730b0fa4 100755 --- a/modules/nextclade/meta.yml +++ b/modules/nextclade/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: 
"versions.yml" - csv: type: file description: CSV file containing nextclade results diff --git a/modules/optitype/functions.nf b/modules/optitype/functions.nf index da9da093..85628ee0 100644 --- a/modules/optitype/functions.nf +++ b/modules/optitype/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/optitype/main.nf b/modules/optitype/main.nf index 15e26c95..4f136d7c 100644 --- a/modules/optitype/main.nf +++ b/modules/optitype/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process OPTITYPE { output: tuple val(meta), path("${prefix}"), emit: output - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -48,6 +48,9 @@ process OPTITYPE { OptiTypePipeline.py -i ${bam} -c config.ini --${meta.seq_type} $options.args --prefix $prefix --outdir $prefix #Couldn't find a nicer way of doing this - cat \$(which OptiTypePipeline.py) | grep -e "Version:" | sed -e "s/Version: //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cat \$(which OptiTypePipeline.py) | grep -e "Version:" | sed -e "s/Version: //g") + END_VERSIONS """ } diff --git a/modules/optitype/meta.yml b/modules/optitype/meta.yml index 734c4f77..02e5cec1 100644 --- a/modules/optitype/meta.yml +++ b/modules/optitype/meta.yml @@ -32,7 +32,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - output: type: file description: OptiType Results Folder diff --git a/modules/pairix/functions.nf b/modules/pairix/functions.nf index da9da093..85628ee0 100644 --- a/modules/pairix/functions.nf +++ b/modules/pairix/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pairix/main.nf b/modules/pairix/main.nf index ff1b8520..684ea7e6 100644 --- a/modules/pairix/main.nf +++ b/modules/pairix/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process PAIRIX { output: tuple val(meta), path(pair), path("*.px2"), emit: index - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -32,6 +32,9 @@ process PAIRIX { $options.args \\ $pair - echo \$(pairix --help 2>&1) | sed 's/^.*Version: //; s/Usage.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pairix --help 2>&1 | sed 's/^.*Version: //; s/Usage.*\$//') + END_VERSIONS """ } diff --git a/modules/pairix/meta.yml b/modules/pairix/meta.yml index b0ac1b15..3c43541a 100644 --- a/modules/pairix/meta.yml +++ b/modules/pairix/meta.yml @@ -32,7 +32,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - index: type: file description: pair index file diff --git a/modules/pairtools/dedup/functions.nf b/modules/pairtools/dedup/functions.nf index da9da093..85628ee0 100644 --- a/modules/pairtools/dedup/functions.nf +++ b/modules/pairtools/dedup/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { 
- if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pairtools/dedup/main.nf b/modules/pairtools/dedup/main.nf index 07c81c6c..5b901a77 100644 --- a/modules/pairtools/dedup/main.nf +++ b/modules/pairtools/dedup/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process PAIRTOOLS_DEDUP { output: tuple val(meta), path("*.pairs.gz") , emit: pairs tuple val(meta), path("*.pairs.stat"), emit: stat - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process PAIRTOOLS_DEDUP { --output-stats ${prefix}.pairs.stat \\ $input - echo \$(pairtools --version 2>&1) | sed 's/pairtools.*version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + END_VERSIONS """ } diff --git a/modules/pairtools/dedup/meta.yml b/modules/pairtools/dedup/meta.yml index 888a52ff..d5a8ae87 100644 --- a/modules/pairtools/dedup/meta.yml +++ b/modules/pairtools/dedup/meta.yml @@ -30,7 +30,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - pairs: type: file description: Duplicates removed pairs diff 
--git a/modules/pairtools/flip/functions.nf b/modules/pairtools/flip/functions.nf index da9da093..85628ee0 100644 --- a/modules/pairtools/flip/functions.nf +++ b/modules/pairtools/flip/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pairtools/flip/main.nf b/modules/pairtools/flip/main.nf index efde6f55..3010b411 100644 --- a/modules/pairtools/flip/main.nf +++ b/modules/pairtools/flip/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process PAIRTOOLS_FLIP { output: tuple val(meta), path("*.flip.gz"), emit: flip - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process PAIRTOOLS_FLIP { -o ${prefix}.flip.gz \\ $sam - echo \$(pairtools --version 2>&1) | sed 's/pairtools.*version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + END_VERSIONS """ } diff --git a/modules/pairtools/flip/meta.yml b/modules/pairtools/flip/meta.yml index 50badc23..981e3828 100644 --- a/modules/pairtools/flip/meta.yml +++ b/modules/pairtools/flip/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - flip: type: file description: output file of flip diff --git a/modules/pairtools/parse/functions.nf b/modules/pairtools/parse/functions.nf index da9da093..85628ee0 100644 --- a/modules/pairtools/parse/functions.nf +++ b/modules/pairtools/parse/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pairtools/parse/main.nf b/modules/pairtools/parse/main.nf index ad3169e1..66c9257b 100644 --- a/modules/pairtools/parse/main.nf +++ b/modules/pairtools/parse/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process PAIRTOOLS_PARSE { output: tuple val(meta), path("*.pairsam.gz") , emit: pairsam tuple val(meta), path("*.pairsam.stat"), emit: stat - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process PAIRTOOLS_PARSE { -o ${prefix}.pairsam.gz \\ $bam - echo \$(pairtools --version 2>&1) | sed 's/pairtools.*version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + END_VERSIONS """ } diff --git a/modules/pairtools/parse/meta.yml b/modules/pairtools/parse/meta.yml index 311aa0ec..940fe5d1 100644 --- a/modules/pairtools/parse/meta.yml +++ b/modules/pairtools/parse/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - pairsam: type: file description: parsed pair file diff --git a/modules/pairtools/restrict/functions.nf b/modules/pairtools/restrict/functions.nf index da9da093..85628ee0 100644 --- a/modules/pairtools/restrict/functions.nf +++ b/modules/pairtools/restrict/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pairtools/restrict/main.nf b/modules/pairtools/restrict/main.nf index b9d7c7c7..31f463ad 100644 --- a/modules/pairtools/restrict/main.nf +++ b/modules/pairtools/restrict/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process PAIRTOOLS_RESTRICT { output: tuple val(meta), path("*.pairs.gz"), emit: restrict - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process PAIRTOOLS_RESTRICT { -o ${prefix}.pairs.gz \\ $pairs - echo \$(pairtools --version 2>&1) | sed 's/pairtools.*version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + END_VERSIONS """ } diff --git a/modules/pairtools/restrict/meta.yml b/modules/pairtools/restrict/meta.yml index 28b8eabc..9dfb8f76 100644 --- a/modules/pairtools/restrict/meta.yml +++ b/modules/pairtools/restrict/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing 
software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - restrict: type: file description: Filtered pairs file diff --git a/modules/pairtools/select/functions.nf b/modules/pairtools/select/functions.nf index da9da093..85628ee0 100644 --- a/modules/pairtools/select/functions.nf +++ b/modules/pairtools/select/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pairtools/select/main.nf b/modules/pairtools/select/main.nf index 680ad555..c9218ea9 100644 --- a/modules/pairtools/select/main.nf +++ b/modules/pairtools/select/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process PAIRTOOLS_SELECT { output: tuple val(meta), path("*.selected.pairs.gz") , emit: selected tuple val(meta), path("*.unselected.pairs.gz"), emit: unselected - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process PAIRTOOLS_SELECT { --output-rest ${prefix}.unselected.pairs.gz \\ ${input} - echo \$(pairtools --version 2>&1) | sed 's/pairtools.*version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + END_VERSIONS """ } diff --git a/modules/pairtools/select/meta.yml b/modules/pairtools/select/meta.yml index cf256de2..18e97e99 100644 --- a/modules/pairtools/select/meta.yml +++ b/modules/pairtools/select/meta.yml @@ -30,7 +30,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - selected: type: file description: Selected pairs file diff --git a/modules/pairtools/sort/functions.nf b/modules/pairtools/sort/functions.nf index da9da093..85628ee0 100644 --- a/modules/pairtools/sort/functions.nf +++ b/modules/pairtools/sort/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pairtools/sort/main.nf b/modules/pairtools/sort/main.nf index d169d354..27caed7b 100644 --- a/modules/pairtools/sort/main.nf +++ b/modules/pairtools/sort/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process PAIRTOOLS_SORT { output: tuple val(meta), path("*.pairs.gz"), emit: sorted - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,6 +38,9 @@ process PAIRTOOLS_SORT { -o ${prefix}.pairs.gz \\ $input - echo \$(pairtools --version 2>&1) | sed 's/pairtools.*version //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + END_VERSIONS """ } diff --git a/modules/pairtools/sort/meta.yml b/modules/pairtools/sort/meta.yml index 565961bc..6f36323c 100644 --- a/modules/pairtools/sort/meta.yml +++ b/modules/pairtools/sort/meta.yml @@ -30,7 +30,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - sorted: type: file description: Sorted pairs file diff --git a/modules/pangolin/functions.nf b/modules/pangolin/functions.nf index da9da093..85628ee0 100644 --- a/modules/pangolin/functions.nf +++ b/modules/pangolin/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options 
for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index 5639dd00..5292d1c3 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process PANGOLIN { output: tuple val(meta), path('*.csv'), emit: report - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process PANGOLIN { --threads $task.cpus \\ $options.args - echo \$(pangolin --version) | sed "s/pangolin //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pangolin --version | sed "s/pangolin //g") + END_VERSIONS """ } diff --git a/modules/pangolin/meta.yml b/modules/pangolin/meta.yml index 2b2eb952..b1b583e9 100644 --- a/modules/pangolin/meta.yml +++ b/modules/pangolin/meta.yml @@ -27,7 +27,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@kevinmenden" - "@drpatelh" diff --git a/modules/pbccs/functions.nf 
b/modules/pbccs/functions.nf index da9da093..85628ee0 100644 --- a/modules/pbccs/functions.nf +++ b/modules/pbccs/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 95f9908c..ccf17cc4 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -29,7 +29,7 @@ process PBCCS { tuple val(meta), path("*.ccs_report.txt" ) , emit: ccs_report_txt tuple val(meta), path("*.ccs_report.json" ) , emit: ccs_report_json tuple val(meta), path("*.zmw_metrics.json.gz"), emit: zmw_metrics - tuple val(meta), path("*.version.txt" ) , emit: version + tuple val(meta), path("versions.yml" ) , emit: version script: def software = getSoftwareName(task.process) @@ -49,6 +49,9 @@ process PBCCS { -j $task.cpus \\ $options.args - echo \$(ccs --version 2>&1) | grep -e 'commit' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(ccs --version 2>&1 | grep -e 'commit') + END_VERSIONS """ } diff --git a/modules/pbccs/meta.yml b/modules/pbccs/meta.yml index 8ed27abc..eb89d628 100644 --- a/modules/pbccs/meta.yml +++ b/modules/pbccs/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - css: type: file description: Consensus sequences diff --git a/modules/phantompeakqualtools/functions.nf b/modules/phantompeakqualtools/functions.nf index da9da093..85628ee0 100644 --- a/modules/phantompeakqualtools/functions.nf +++ b/modules/phantompeakqualtools/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index 7656420f..166ed8be 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process PHANTOMPEAKQUALTOOLS { tuple val(meta), path("*.out") , emit: spp tuple val(meta), path("*.pdf") , emit: pdf tuple val(meta), path("*.Rdata"), emit: rdata - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process PHANTOMPEAKQUALTOOLS { """ RUN_SPP=`which run_spp.R` Rscript -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/picard/collectmultiplemetrics/functions.nf b/modules/picard/collectmultiplemetrics/functions.nf index da9da093..85628ee0 100644 --- a/modules/picard/collectmultiplemetrics/functions.nf +++ b/modules/picard/collectmultiplemetrics/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index 81547e84..11ddee9b 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process PICARD_COLLECTMULTIPLEMETRICS { output: tuple val(meta), path("*_metrics"), emit: metrics tuple val(meta), path("*.pdf") , emit: pdf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -45,6 +45,9 @@ process PICARD_COLLECTMULTIPLEMETRICS { OUTPUT=${prefix}.CollectMultipleMetrics \\ REFERENCE_SEQUENCE=$fasta - echo \$(picard CollectMultipleMetrics --version 2>&1) | grep -o 'Version.*' | cut -f2- -d: > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(picard CollectMultipleMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + END_VERSIONS """ } diff --git a/modules/picard/collectmultiplemetrics/meta.yml b/modules/picard/collectmultiplemetrics/meta.yml index 34006093..a588fd98 100644 --- a/modules/picard/collectmultiplemetrics/meta.yml +++ b/modules/picard/collectmultiplemetrics/meta.yml @@ -44,6 +44,6 @@ output: - version: type: file description: File 
containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/picard/collectwgsmetrics/functions.nf b/modules/picard/collectwgsmetrics/functions.nf index da9da093..85628ee0 100644 --- a/modules/picard/collectwgsmetrics/functions.nf +++ b/modules/picard/collectwgsmetrics/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index 2f01354c..b5d11839 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process PICARD_COLLECTWGSMETRICS { output: tuple val(meta), path("*_metrics"), emit: metrics - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -44,6 +44,9 @@ process PICARD_COLLECTWGSMETRICS { OUTPUT=${prefix}.CollectWgsMetrics.coverage_metrics \\ REFERENCE_SEQUENCE=$fasta - echo \$(picard CollectWgsMetrics --version 2>&1) | grep -o 'Version.*' | cut -f2- -d: > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(picard CollectWgsMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + END_VERSIONS """ } diff --git a/modules/picard/collectwgsmetrics/meta.yml b/modules/picard/collectwgsmetrics/meta.yml index d8a2d9fb..ec828af5 100644 --- a/modules/picard/collectwgsmetrics/meta.yml +++ b/modules/picard/collectwgsmetrics/meta.yml @@ -39,7 +39,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@flowuenne" diff --git a/modules/picard/filtersamreads/functions.nf b/modules/picard/filtersamreads/functions.nf index da9da093..85628ee0 100644 --- a/modules/picard/filtersamreads/functions.nf +++ b/modules/picard/filtersamreads/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index 22b8c5a8..c7e40d27 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process PICARD_FILTERSAMREADS { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -45,7 +45,10 @@ process PICARD_FILTERSAMREADS { --FILTER $filter \\ $options.args - echo \$(picard FilterSamReads --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS """ } else if ( filter == 'includeReadList' || filter == 'excludeReadList' ) { """ @@ -58,7 +61,10 @@ process PICARD_FILTERSAMREADS { --READ_LIST_FILE $readlist \\ $options.args - echo \$(picard FilterSamReads --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS """ } } diff --git a/modules/picard/filtersamreads/meta.yml b/modules/picard/filtersamreads/meta.yml index b5beba90..82f78065 100644 --- 
a/modules/picard/filtersamreads/meta.yml +++ b/modules/picard/filtersamreads/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@jfy133" diff --git a/modules/picard/markduplicates/functions.nf b/modules/picard/markduplicates/functions.nf index da9da093..85628ee0 100644 --- a/modules/picard/markduplicates/functions.nf +++ b/modules/picard/markduplicates/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index ac829515..62cd10c2 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process PICARD_MARKDUPLICATES { tuple val(meta), path("*.bam") , emit: bam tuple val(meta), path("*.bai") , optional:true, emit: bai tuple val(meta), path("*.metrics.txt"), emit: metrics - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -45,6 +45,9 @@ process PICARD_MARKDUPLICATES { -O ${prefix}.bam \\ -M ${prefix}.MarkDuplicates.metrics.txt - echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(picard MarkDuplicates --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS """ } diff --git a/modules/picard/markduplicates/meta.yml b/modules/picard/markduplicates/meta.yml index b651b3a0..db72b5c5 100644 --- a/modules/picard/markduplicates/meta.yml +++ b/modules/picard/markduplicates/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@projectoriented" diff --git a/modules/picard/mergesamfiles/functions.nf b/modules/picard/mergesamfiles/functions.nf index da9da093..85628ee0 100644 --- a/modules/picard/mergesamfiles/functions.nf +++ b/modules/picard/mergesamfiles/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index 9fd28af6..e9cba284 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process PICARD_MERGESAMFILES { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,12 +43,18 @@ process PICARD_MERGESAMFILES { $options.args \\ ${'INPUT='+bam_files.join(' INPUT=')} \\ OUTPUT=${prefix}.bam - echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(picard MergeSamFiles --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS """ } else { """ ln -s ${bam_files[0]} ${prefix}.bam - echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(picard MergeSamFiles --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS """ } } diff --git a/modules/picard/mergesamfiles/meta.yml b/modules/picard/mergesamfiles/meta.yml index 4ea9fd17..82ba2a43 100644 --- a/modules/picard/mergesamfiles/meta.yml +++ b/modules/picard/mergesamfiles/meta.yml @@ -35,6 +35,6 @@ output: - 
version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/picard/sortsam/functions.nf b/modules/picard/sortsam/functions.nf index da9da093..85628ee0 100644 --- a/modules/picard/sortsam/functions.nf +++ b/modules/picard/sortsam/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index bb815c8f..475a30f9 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -1,6 +1,6 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process PICARD_SORTSAM { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -44,6 +44,9 @@ process PICARD_SORTSAM { --OUTPUT ${prefix}.bam \\ --SORT_ORDER $sort_order - echo \$(picard SortSam --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(picard SortSam --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS """ } diff --git a/modules/picard/sortsam/meta.yml b/modules/picard/sortsam/meta.yml index 42de6eab..37d12b91 100644 --- a/modules/picard/sortsam/meta.yml +++ b/modules/picard/sortsam/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: Sorted BAM/CRAM/SAM file diff --git a/modules/plasmidid/functions.nf b/modules/plasmidid/functions.nf index da9da093..85628ee0 100644 --- a/modules/plasmidid/functions.nf +++ b/modules/plasmidid/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/plasmidid/main.nf b/modules/plasmidid/main.nf index 986b6451..8be58c57 100644 --- a/modules/plasmidid/main.nf +++ b/modules/plasmidid/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -31,7 +31,7 @@ process PLASMIDID { tuple val(meta), path("${prefix}/database/") , emit: database tuple val(meta), path("${prefix}/fasta_files/") , emit: fasta_files tuple val(meta), path("${prefix}/kmer/") , emit: kmer - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -45,6 +45,9 @@ process PLASMIDID { -o . 
mv NO_GROUP/$prefix ./$prefix - echo \$(plasmidID --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(plasmidID --version 2>&1) + END_VERSIONS """ } diff --git a/modules/plasmidid/meta.yml b/modules/plasmidid/meta.yml index b7b188f8..a2689ddf 100644 --- a/modules/plasmidid/meta.yml +++ b/modules/plasmidid/meta.yml @@ -69,7 +69,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/plink/vcf/functions.nf b/modules/plink/vcf/functions.nf index da9da093..85628ee0 100644 --- a/modules/plink/vcf/functions.nf +++ b/modules/plink/vcf/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf index 39cc3825..697be55e 100644 --- a/modules/plink/vcf/main.nf +++ b/modules/plink/vcf/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process PLINK_VCF { tuple val(meta), path("*.bim"), emit: bim, optional: true tuple val(meta), path("*.fam"), emit: fam, optional: true - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process PLINK_VCF { --threads $task.cpus \\ --out ${prefix} - echo \$(plink --version 2>&1) | sed 's/^PLINK //' | sed 's/..-bit.*//'> ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + plink: \$( plink --version 2>&1 | sed 's/^PLINK //' | sed 's/..-bit.*//' ) + END_VERSIONS """ } diff --git a/modules/plink/vcf/meta.yml b/modules/plink/vcf/meta.yml index 8673158b..146a0030 100644 --- a/modules/plink/vcf/meta.yml +++ b/modules/plink/vcf/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bed: type: file description: PLINK binary biallelic genotype table diff --git a/modules/preseq/lcextrap/functions.nf b/modules/preseq/lcextrap/functions.nf index da9da093..85628ee0 100644 --- a/modules/preseq/lcextrap/functions.nf +++ b/modules/preseq/lcextrap/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index 2f601e4f..059b81f6 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process PRESEQ_LCEXTRAP { output: tuple val(meta), path("*.ccurve.txt"), emit: ccurve tuple val(meta), path("*.log") , emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -40,6 +40,9 @@ process PRESEQ_LCEXTRAP { $bam cp .command.err ${prefix}.command.log - echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(preseq 2>&1 | sed 's/^.*Version: //; s/Usage:.*\$//') + END_VERSIONS """ } diff --git a/modules/preseq/lcextrap/meta.yml b/modules/preseq/lcextrap/meta.yml index d1716231..616d8243 100755 --- a/modules/preseq/lcextrap/meta.yml +++ b/modules/preseq/lcextrap/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - ccurve: type: file description: File containing output of Preseq lcextrap diff --git a/modules/prodigal/functions.nf b/modules/prodigal/functions.nf index da9da093..85628ee0 100644 --- a/modules/prodigal/functions.nf +++ b/modules/prodigal/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core 
modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/prodigal/main.nf b/modules/prodigal/main.nf index 5b73d6f1..6944f86b 100644 --- a/modules/prodigal/main.nf +++ b/modules/prodigal/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process PRODIGAL { tuple val(meta), path("${prefix}.fna"), emit: nucleotide_fasta tuple val(meta), path("${prefix}.faa"), emit: amino_acid_fasta tuple val(meta), path("${prefix}_all.txt"), emit: all_gene_annotations - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,6 +41,9 @@ process PRODIGAL { -a "${prefix}.faa" \\ -s "${prefix}_all.txt" - echo \$(prodigal -v 2>&1) | sed -n 's/Prodigal V\\(.*\\):.*/\\1/p' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(prodigal -v 2>&1 | sed -n 's/Prodigal V\\(.*\\):.*/\\1/p') + END_VERSIONS """ } diff --git a/modules/prodigal/meta.yml b/modules/prodigal/meta.yml index f48fe96d..c24ca4a3 100644 --- a/modules/prodigal/meta.yml +++ b/modules/prodigal/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file 
description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: Sorted BAM/CRAM/SAM file diff --git a/modules/prokka/functions.nf b/modules/prokka/functions.nf index da9da093..85628ee0 100644 --- a/modules/prokka/functions.nf +++ b/modules/prokka/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/prokka/main.nf b/modules/prokka/main.nf index 1fa3f3d9..c2a9d682 100644 --- a/modules/prokka/main.nf +++ b/modules/prokka/main.nf @@ -1,4 +1,4 @@ -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -35,7 +35,7 @@ process PROKKA { tuple val(meta), path("${prefix}/*.log"), emit: log tuple val(meta), path("${prefix}/*.txt"), emit: txt tuple val(meta), path("${prefix}/*.tsv"), emit: tsv - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -51,6 +51,9 @@ process PROKKA { $prodigal_tf \\ $fasta - echo \$(prokka --version 2>&1) | sed 's/^.*prokka //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(prokka --version 2>&1 | sed 's/^.*prokka //') + END_VERSIONS """ } diff --git a/modules/prokka/meta.yml b/modules/prokka/meta.yml index 4489b2fd..26fb767a 100644 --- a/modules/prokka/meta.yml +++ b/modules/prokka/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - gff: type: file description: annotation in GFF3 format, containing both sequences and annotations diff --git a/modules/pycoqc/functions.nf b/modules/pycoqc/functions.nf index da9da093..85628ee0 100644 --- a/modules/pycoqc/functions.nf +++ b/modules/pycoqc/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pycoqc/main.nf b/modules/pycoqc/main.nf index 3f010247..f3b164ee 100644 --- a/modules/pycoqc/main.nf +++ b/modules/pycoqc/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process PYCOQC { output: path "*.html" , emit: html path "*.json" , emit: json - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process PYCOQC { -o pycoqc.html \\ -j pycoqc.json - echo \$(pycoQC --version 2>&1) | sed 's/^.*pycoQC v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pycoQC --version 2>&1 | sed 's/^.*pycoQC v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/pycoqc/meta.yml b/modules/pycoqc/meta.yml index 059b2f15..32012e83 100644 --- a/modules/pycoqc/meta.yml +++ b/modules/pycoqc/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/pydamage/analyze/functions.nf b/modules/pydamage/analyze/functions.nf index da9da093..85628ee0 100644 --- a/modules/pydamage/analyze/functions.nf +++ b/modules/pydamage/analyze/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if 
(!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index 5a2f331b..042e6c74 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process PYDAMAGE_ANALYZE { output: tuple val(meta), path("pydamage_results/pydamage_results.csv"), emit: csv - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process PYDAMAGE_ANALYZE { -p $task.cpus \\ $bam - echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pydamage --version 2>&1 | sed -e 's/pydamage, version //g') + END_VERSIONS """ } diff --git a/modules/pydamage/analyze/meta.yml b/modules/pydamage/analyze/meta.yml index 3da9f793..7369a3a3 100644 --- a/modules/pydamage/analyze/meta.yml +++ b/modules/pydamage/analyze/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - csv: type: file description: PyDamage results as csv files diff --git 
a/modules/pydamage/filter/functions.nf b/modules/pydamage/filter/functions.nf index da9da093..85628ee0 100644 --- a/modules/pydamage/filter/functions.nf +++ b/modules/pydamage/filter/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index 0010a7e0..9cb95b4a 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process PYDAMAGE_FILTER { output: tuple val(meta), path("pydamage_results/pydamage_filtered_results.csv"), emit: csv - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process PYDAMAGE_FILTER { $options.args \\ $csv - echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(pydamage --version 2>&1 | sed -e 's/pydamage, version //g') + END_VERSIONS """ } diff --git a/modules/pydamage/filter/meta.yml b/modules/pydamage/filter/meta.yml index 0870636b..29d4642b 100644 --- a/modules/pydamage/filter/meta.yml +++ b/modules/pydamage/filter/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - csv: type: file description: PyDamage filtered results as csv file diff --git a/modules/qcat/functions.nf b/modules/qcat/functions.nf index da9da093..85628ee0 100644 --- a/modules/qcat/functions.nf +++ b/modules/qcat/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/qcat/main.nf b/modules/qcat/main.nf index 8fc0814a..be239816 100644 --- a/modules/qcat/main.nf +++ b/modules/qcat/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process QCAT { output: tuple val(meta), path("fastq/*.fastq.gz"), emit: reads - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -47,6 +47,9 @@ process QCAT { ## Zip fastq files gzip fastq/* - echo \$(qcat --version 2>&1) | sed 's/^.*qcat //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(qcat --version 2>&1 | sed 's/^.*qcat //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/qcat/meta.yml b/modules/qcat/meta.yml index 3280f26e..5946eaa8 100644 --- a/modules/qcat/meta.yml +++ b/modules/qcat/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@yuukiiwa" - "@drpatelh" diff --git a/modules/qualimap/bamqc/functions.nf b/modules/qualimap/bamqc/functions.nf index da9da093..85628ee0 100644 --- a/modules/qualimap/bamqc/functions.nf +++ b/modules/qualimap/bamqc/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if 
(!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index 463cde4c..17779e27 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process QUALIMAP_BAMQC { output: tuple val(meta), path("${prefix}"), emit: results - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -56,6 +56,9 @@ process QUALIMAP_BAMQC { -outdir $prefix \\ -nt $task.cpus - echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(qualimap 2>&1 | sed 's/^.*QualiMap v.//; s/Built.*\$//') + END_VERSIONS """ } diff --git a/modules/qualimap/bamqc/meta.yml b/modules/qualimap/bamqc/meta.yml index 3c608f31..74c3ffdf 100644 --- a/modules/qualimap/bamqc/meta.yml +++ b/modules/qualimap/bamqc/meta.yml @@ -44,6 +44,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@phue" diff --git a/modules/qualimap/rnaseq/functions.nf b/modules/qualimap/rnaseq/functions.nf index da9da093..85628ee0 100644 --- 
a/modules/qualimap/rnaseq/functions.nf +++ b/modules/qualimap/rnaseq/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/qualimap/rnaseq/main.nf b/modules/qualimap/rnaseq/main.nf index 96fe2cd4..d1ed1021 100644 --- a/modules/qualimap/rnaseq/main.nf +++ b/modules/qualimap/rnaseq/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process QUALIMAP_RNASEQ { output: tuple val(meta), path("${prefix}"), emit: results - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -52,6 +52,9 @@ process QUALIMAP_RNASEQ { $paired_end \\ -outdir $prefix - echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(qualimap 2>&1 | sed 's/^.*QualiMap v.//; s/Built.*\$//') + END_VERSIONS """ } diff --git a/modules/quast/functions.nf b/modules/quast/functions.nf index da9da093..85628ee0 100644 --- a/modules/quast/functions.nf +++ b/modules/quast/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/quast/main.nf b/modules/quast/main.nf index 0b94c410..97ff93e2 100644 --- a/modules/quast/main.nf +++ b/modules/quast/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process QUAST { output: path "${prefix}" , emit: results path '*.tsv' , emit: tsv - path '*.version.txt', emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,6 +43,9 @@ process QUAST { $options.args \\ ${consensus.join(' ')} ln -s ${prefix}/report.tsv - echo \$(quast.py --version 2>&1) | sed 's/^.*QUAST v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(quast.py --version 2>&1 | sed 's/^.*QUAST v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/quast/meta.yml b/modules/quast/meta.yml index cc79486e..742dc8f0 100644 --- a/modules/quast/meta.yml +++ b/modules/quast/meta.yml @@ -39,7 +39,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/rapidnj/functions.nf b/modules/rapidnj/functions.nf index da9da093..85628ee0 100644 --- a/modules/rapidnj/functions.nf +++ b/modules/rapidnj/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rapidnj/main.nf b/modules/rapidnj/main.nf index 78ed7693..a46fbfe8 100644 --- a/modules/rapidnj/main.nf +++ b/modules/rapidnj/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process RAPIDNJ { output: path "*.sth" , emit: stockholm_alignment path "*.tre" , emit: phylogeny - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -39,6 +39,9 @@ process RAPIDNJ { -x rapidnj_phylogeny.tre # Doesn't appear to be a way of getting the version number - echo 2.3.2 > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo 2.3.2) + END_VERSIONS """ } diff --git a/modules/rapidnj/meta.yml b/modules/rapidnj/meta.yml index cf2c61fc..7f7da9b9 100644 --- a/modules/rapidnj/meta.yml +++ b/modules/rapidnj/meta.yml @@ -23,7 +23,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - phylogeny: type: file description: A phylogeny in Newick format diff --git a/modules/rasusa/functions.nf b/modules/rasusa/functions.nf index da9da093..85628ee0 100644 --- a/modules/rasusa/functions.nf +++ b/modules/rasusa/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return 
task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rasusa/main.nf b/modules/rasusa/main.nf index f895e1a2..88f3a208 100644 --- a/modules/rasusa/main.nf +++ b/modules/rasusa/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process RASUSA { output: tuple val(meta), path('*.fastq.gz'), emit: reads - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process RASUSA { --genome-size $genome_size \\ --input $reads \\ $output - echo \$(rasusa --version 2>&1) | sed -e "s/rasusa //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(rasusa --version 2>&1 | sed -e "s/rasusa //g") + END_VERSIONS """ } diff --git a/modules/rasusa/meta.yml b/modules/rasusa/meta.yml index 074ab2f0..61cdbe0c 100644 --- a/modules/rasusa/meta.yml +++ b/modules/rasusa/meta.yml @@ -38,7 +38,7 @@ output: - version: type: file description: File containing software 
version - pattern: "*.{version.txt}" + pattern: "versions.yml" - reads: type: file description: Reads with subsampled coverage diff --git a/modules/raxmlng/functions.nf b/modules/raxmlng/functions.nf index da9da093..85628ee0 100644 --- a/modules/raxmlng/functions.nf +++ b/modules/raxmlng/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/raxmlng/main.nf b/modules/raxmlng/main.nf index 9f8597b5..7094eaa7 100644 --- a/modules/raxmlng/main.nf +++ b/modules/raxmlng/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process RAXMLNG { output: path "*.raxml.bestTree", emit: phylogeny path "*.raxml.support" , optional:true, emit: phylogeny_bootstrapped - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process RAXMLNG { --threads $task.cpus \\ --prefix output - echo \$(raxml-ng --version 2>&1) | sed 's/^.*RAxML-NG v. //; s/released.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(raxml-ng --version 2>&1 | sed 's/^.*RAxML-NG v. //; s/released.*\$//') + END_VERSIONS """ } diff --git a/modules/raxmlng/meta.yml b/modules/raxmlng/meta.yml index 1df98148..d5f755c5 100644 --- a/modules/raxmlng/meta.yml +++ b/modules/raxmlng/meta.yml @@ -23,7 +23,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - phylogeny: type: file description: A phylogeny in Newick format diff --git a/modules/rsem/calculateexpression/functions.nf b/modules/rsem/calculateexpression/functions.nf index da9da093..85628ee0 100644 --- a/modules/rsem/calculateexpression/functions.nf +++ b/modules/rsem/calculateexpression/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index 1f9ab854..33f34904 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process RSEM_CALCULATEEXPRESSION { tuple val(meta), path("*.isoforms.results"), emit: counts_transcript tuple val(meta), path("*.stat") , emit: stat tuple val(meta), path("*.log") , emit: logs - path "*.version.txt" , emit: version + path "versions.yml" , emit: version tuple val(meta), path("*.STAR.genome.bam") , optional:true, emit: bam_star tuple val(meta), path("${prefix}.genome.bam") , optional:true, emit: bam_genome @@ -56,6 +56,9 @@ process RSEM_CALCULATEEXPRESSION { \$INDEX \\ $prefix - rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + END_VERSIONS """ } diff --git a/modules/rsem/calculateexpression/meta.yml b/modules/rsem/calculateexpression/meta.yml index f8577085..079751d3 100644 --- a/modules/rsem/calculateexpression/meta.yml +++ b/modules/rsem/calculateexpression/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam_star: type: file description: BAM file generated by STAR (optional) diff --git a/modules/rsem/preparereference/functions.nf b/modules/rsem/preparereference/functions.nf index da9da093..85628ee0 100644 --- a/modules/rsem/preparereference/functions.nf +++ b/modules/rsem/preparereference/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index a378eb6b..560b5a63 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process RSEM_PREPAREREFERENCE { output: path "rsem" , emit: index path "rsem/*transcripts.fa", emit: transcript_fasta - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -50,7 +50,10 @@ process RSEM_PREPAREREFERENCE { $fasta \\ rsem/genome - rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: 
\$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + END_VERSIONS """ } else { """ @@ -61,7 +64,10 @@ process RSEM_PREPAREREFERENCE { $fasta \\ rsem/genome - rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + END_VERSIONS """ } } diff --git a/modules/rsem/preparereference/meta.yml b/modules/rsem/preparereference/meta.yml index d7c02154..5ccca28a 100644 --- a/modules/rsem/preparereference/meta.yml +++ b/modules/rsem/preparereference/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/rseqc/bamstat/functions.nf b/modules/rseqc/bamstat/functions.nf index da9da093..85628ee0 100644 --- a/modules/rseqc/bamstat/functions.nf +++ b/modules/rseqc/bamstat/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rseqc/bamstat/main.nf b/modules/rseqc/bamstat/main.nf index 913c3f53..fa71dd11 100644 --- a/modules/rseqc/bamstat/main.nf +++ b/modules/rseqc/bamstat/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process RSEQC_BAMSTAT { output: tuple val(meta), path("*.bam_stat.txt"), emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process RSEQC_BAMSTAT { $options.args \\ > ${prefix}.bam_stat.txt - bam_stat.py --version | sed -e "s/bam_stat.py //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bam_stat.py --version | sed -e "s/bam_stat.py //g") + END_VERSIONS """ } diff --git a/modules/rseqc/bamstat/meta.yml b/modules/rseqc/bamstat/meta.yml index ff00d8c9..adb81c1c 100644 --- a/modules/rseqc/bamstat/meta.yml +++ b/modules/rseqc/bamstat/meta.yml @@ -30,7 +30,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/rseqc/inferexperiment/functions.nf b/modules/rseqc/inferexperiment/functions.nf index da9da093..85628ee0 100644 --- a/modules/rseqc/inferexperiment/functions.nf +++ b/modules/rseqc/inferexperiment/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rseqc/inferexperiment/main.nf b/modules/rseqc/inferexperiment/main.nf index a887e6e6..a9842c0d 100644 --- a/modules/rseqc/inferexperiment/main.nf +++ b/modules/rseqc/inferexperiment/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process RSEQC_INFEREXPERIMENT { output: tuple val(meta), path("*.infer_experiment.txt"), emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process RSEQC_INFEREXPERIMENT { $options.args \\ > ${prefix}.infer_experiment.txt - infer_experiment.py --version | sed -e "s/infer_experiment.py //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(infer_experiment.py --version | sed -e "s/infer_experiment.py //g") + END_VERSIONS """ } diff --git a/modules/rseqc/inferexperiment/meta.yml b/modules/rseqc/inferexperiment/meta.yml index 366c3e33..f89f90d1 100644 --- a/modules/rseqc/inferexperiment/meta.yml +++ b/modules/rseqc/inferexperiment/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/rseqc/innerdistance/functions.nf b/modules/rseqc/innerdistance/functions.nf index da9da093..85628ee0 100644 --- a/modules/rseqc/innerdistance/functions.nf +++ b/modules/rseqc/innerdistance/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and 
to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rseqc/innerdistance/main.nf b/modules/rseqc/innerdistance/main.nf index e2e8f909..d98780f1 100644 --- a/modules/rseqc/innerdistance/main.nf +++ b/modules/rseqc/innerdistance/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process RSEQC_INNERDISTANCE { tuple val(meta), path("*mean.txt") , optional:true, emit: mean tuple val(meta), path("*.pdf") , optional:true, emit: pdf tuple val(meta), path("*.r") , optional:true, emit: rscript - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,11 +43,17 @@ process RSEQC_INNERDISTANCE { > stdout.txt head -n 2 stdout.txt > ${prefix}.inner_distance_mean.txt - inner_distance.py --version | sed -e "s/inner_distance.py //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") + END_VERSIONS """ } else { """ - inner_distance.py --version | sed -e "s/inner_distance.py //g" > 
${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") + END_VERSIONS """ } } diff --git a/modules/rseqc/innerdistance/meta.yml b/modules/rseqc/innerdistance/meta.yml index ed72c51c..5b2b5e79 100644 --- a/modules/rseqc/innerdistance/meta.yml +++ b/modules/rseqc/innerdistance/meta.yml @@ -49,7 +49,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/rseqc/junctionannotation/functions.nf b/modules/rseqc/junctionannotation/functions.nf index da9da093..85628ee0 100644 --- a/modules/rseqc/junctionannotation/functions.nf +++ b/modules/rseqc/junctionannotation/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index 30bdcd11..cfb12d69 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -30,7 +30,7 @@ process RSEQC_JUNCTIONANNOTATION { tuple val(meta), path("*.Interact.bed"), optional:true, emit: interact_bed tuple val(meta), path("*junction.pdf") , optional:true, emit: pdf tuple val(meta), path("*events.pdf") , optional:true, emit: events_pdf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,6 +43,9 @@ process RSEQC_JUNCTIONANNOTATION { $options.args \\ 2> ${prefix}.junction_annotation.log - junction_annotation.py --version | sed -e "s/junction_annotation.py //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(junction_annotation.py --version | sed -e "s/junction_annotation.py //g") + END_VERSIONS """ } diff --git a/modules/rseqc/junctionannotation/meta.yml b/modules/rseqc/junctionannotation/meta.yml index 64926ce0..d96e7756 100644 --- a/modules/rseqc/junctionannotation/meta.yml +++ b/modules/rseqc/junctionannotation/meta.yml @@ -55,7 +55,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/rseqc/junctionsaturation/functions.nf b/modules/rseqc/junctionsaturation/functions.nf index da9da093..85628ee0 100644 --- a/modules/rseqc/junctionsaturation/functions.nf +++ b/modules/rseqc/junctionsaturation/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rseqc/junctionsaturation/main.nf b/modules/rseqc/junctionsaturation/main.nf index 837006d0..a5aa5461 100644 --- a/modules/rseqc/junctionsaturation/main.nf +++ b/modules/rseqc/junctionsaturation/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process RSEQC_JUNCTIONSATURATION { output: tuple val(meta), path("*.pdf"), emit: pdf tuple val(meta), path("*.r") , emit: rscript - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process RSEQC_JUNCTIONSATURATION { -o $prefix \\ $options.args - junction_saturation.py --version | sed -e "s/junction_saturation.py //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(junction_saturation.py --version | sed -e "s/junction_saturation.py //g") + END_VERSIONS """ } diff --git a/modules/rseqc/junctionsaturation/meta.yml b/modules/rseqc/junctionsaturation/meta.yml index 38008577..aaf44cdc 100644 --- a/modules/rseqc/junctionsaturation/meta.yml +++ b/modules/rseqc/junctionsaturation/meta.yml @@ -38,7 +38,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/rseqc/readdistribution/functions.nf b/modules/rseqc/readdistribution/functions.nf index da9da093..85628ee0 100644 --- a/modules/rseqc/readdistribution/functions.nf +++ 
b/modules/rseqc/readdistribution/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rseqc/readdistribution/main.nf b/modules/rseqc/readdistribution/main.nf index 1b09908e..56086c89 100644 --- a/modules/rseqc/readdistribution/main.nf +++ b/modules/rseqc/readdistribution/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process RSEQC_READDISTRIBUTION { output: tuple val(meta), path("*.read_distribution.txt"), emit: txt - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process RSEQC_READDISTRIBUTION { -r $bed \\ > ${prefix}.read_distribution.txt - read_distribution.py --version | sed -e "s/read_distribution.py //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(read_distribution.py --version | sed -e "s/read_distribution.py //g") + END_VERSIONS """ } diff --git a/modules/rseqc/readdistribution/meta.yml b/modules/rseqc/readdistribution/meta.yml index ce2b0f5a..7ffab04f 100644 --- a/modules/rseqc/readdistribution/meta.yml +++ b/modules/rseqc/readdistribution/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/rseqc/readduplication/functions.nf b/modules/rseqc/readduplication/functions.nf index da9da093..85628ee0 100644 --- a/modules/rseqc/readduplication/functions.nf +++ b/modules/rseqc/readduplication/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/rseqc/readduplication/main.nf b/modules/rseqc/readduplication/main.nf index c86b05b6..ca7c2b13 100644 --- a/modules/rseqc/readduplication/main.nf +++ b/modules/rseqc/readduplication/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process RSEQC_READDUPLICATION { tuple val(meta), path("*pos.DupRate.xls"), emit: pos_xls tuple val(meta), path("*.pdf") , emit: pdf tuple val(meta), path("*.r") , emit: rscript - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process RSEQC_READDUPLICATION { -o $prefix \\ $options.args - read_duplication.py --version | sed -e "s/read_duplication.py //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(read_duplication.py --version | sed -e "s/read_duplication.py //g") + END_VERSIONS """ } diff --git a/modules/rseqc/readduplication/meta.yml b/modules/rseqc/readduplication/meta.yml index c43ea688..efc48c0d 100644 --- a/modules/rseqc/readduplication/meta.yml +++ b/modules/rseqc/readduplication/meta.yml @@ -45,7 +45,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@kevinmenden" diff --git a/modules/salmon/index/functions.nf b/modules/salmon/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/salmon/index/functions.nf +++ b/modules/salmon/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def 
getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/salmon/index/main.nf b/modules/salmon/index/main.nf index e72ff121..df4e2ed8 100644 --- a/modules/salmon/index/main.nf +++ b/modules/salmon/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process SALMON_INDEX { output: path "salmon" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -46,6 +46,9 @@ process SALMON_INDEX { -d decoys.txt \\ $options.args \\ -i salmon - salmon --version | sed -e "s/salmon //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(salmon --version | sed -e "s/salmon //g") + END_VERSIONS """ } diff --git a/modules/salmon/index/meta.yml b/modules/salmon/index/meta.yml index 652e2a77..4d16b359 100644 --- a/modules/salmon/index/meta.yml +++ b/modules/salmon/index/meta.yml @@ -28,7 +28,7 @@ output: - version: type: 
file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/salmon/quant/functions.nf b/modules/salmon/quant/functions.nf index da9da093..85628ee0 100644 --- a/modules/salmon/quant/functions.nf +++ b/modules/salmon/quant/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 1b9b5803..92d85f58 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process SALMON_QUANT { output: tuple val(meta), path("${prefix}"), emit: results - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -72,6 +72,9 @@ process SALMON_QUANT { $options.args \\ -o $prefix - salmon --version | sed -e "s/salmon //g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(salmon --version | sed -e "s/salmon //g") + END_VERSIONS """ } diff --git a/modules/salmon/quant/meta.yml b/modules/salmon/quant/meta.yml index f37c9884..981df89e 100644 --- a/modules/salmon/quant/meta.yml +++ b/modules/salmon/quant/meta.yml @@ -48,7 +48,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/samtools/ampliconclip/functions.nf b/modules/samtools/ampliconclip/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/ampliconclip/functions.nf +++ b/modules/samtools/ampliconclip/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 6ec27ccf..4a08026e 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process SAMTOOLS_AMPLICONCLIP { tuple val(meta), path("*.bam") , emit: bam tuple val(meta), path("*.clipstats.txt") , optional:true, emit: stats tuple val(meta), path("*.cliprejects.bam"), optional:true, emit: rejects_bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -46,6 +46,9 @@ process SAMTOOLS_AMPLICONCLIP { -o ${prefix}.bam \\ $bam - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/ampliconclip/meta.yml b/modules/samtools/ampliconclip/meta.yml index fce06986..2ecbf463 100644 --- a/modules/samtools/ampliconclip/meta.yml +++ b/modules/samtools/ampliconclip/meta.yml @@ -46,7 +46,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: Clipped reads BAM file diff --git a/modules/samtools/faidx/functions.nf b/modules/samtools/faidx/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/faidx/functions.nf +++ b/modules/samtools/faidx/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) 
{ + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index a89ff2bb..fad14602 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,12 +23,15 @@ process SAMTOOLS_FAIDX { output: path "*.fai" , emit: fai - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ samtools faidx $fasta - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/faidx/meta.yml b/modules/samtools/faidx/meta.yml index f92234d0..77d21861 100644 --- a/modules/samtools/faidx/meta.yml +++ b/modules/samtools/faidx/meta.yml @@ -25,7 +25,7 @@ output: - version: type: file 
description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@ewels" diff --git a/modules/samtools/fastq/functions.nf b/modules/samtools/fastq/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/fastq/functions.nf +++ b/modules/samtools/fastq/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 6bedbb4e..73d32db8 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process SAMTOOLS_FASTQ { output: tuple val(meta), path("*.fastq.gz"), emit: fastq - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process SAMTOOLS_FASTQ { -@ $task.cpus \\ $endedness \\ $bam - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/fastq/meta.yml b/modules/samtools/fastq/meta.yml index ce269552..7c4cc488 100644 --- a/modules/samtools/fastq/meta.yml +++ b/modules/samtools/fastq/meta.yml @@ -37,6 +37,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@suzannejin" diff --git a/modules/samtools/flagstat/functions.nf b/modules/samtools/flagstat/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/flagstat/functions.nf +++ b/modules/samtools/flagstat/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index d4852c66..70c04b23 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,12 +23,15 @@ process SAMTOOLS_FLAGSTAT { output: tuple val(meta), path("*.flagstat"), emit: flagstat - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ samtools flagstat $bam > ${bam}.flagstat - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/flagstat/meta.yml b/modules/samtools/flagstat/meta.yml index 8414bf54..d40e45b4 100644 --- a/modules/samtools/flagstat/meta.yml +++ b/modules/samtools/flagstat/meta.yml @@ -43,6 +43,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/idxstats/functions.nf b/modules/samtools/idxstats/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/idxstats/functions.nf +++ b/modules/samtools/idxstats/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index 14a07cfb..33605f30 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,12 +23,15 @@ process SAMTOOLS_IDXSTATS { output: tuple val(meta), path("*.idxstats"), emit: idxstats - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ samtools idxstats $bam > ${bam}.idxstats - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/idxstats/meta.yml b/modules/samtools/idxstats/meta.yml index 530d0772..93e8f694 100644 --- a/modules/samtools/idxstats/meta.yml +++ b/modules/samtools/idxstats/meta.yml @@ -44,6 +44,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" 
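[Editor's illustrative sketch, not part of the patch: every hunk above repeats the same refactor — per-tool `*.version.txt` outputs are replaced by a `versions.yml` written from a heredoc keyed by `getProcessName(task.process)` and `getSoftwareName(task.process)`, and `saveFiles()` now skips publishing that file unless the `NF_CORE_MODULES_TEST` environment variable is set. The Groovy below mirrors those helpers and the publishing gate; the example `$task.process` value is hypothetical.]

// Minimal Groovy sketch of the helpers added to each module's functions.nf.
def getSoftwareName(task_process) {
    // e.g. 'NFCORE_RNASEQ:RNASEQ:SAMTOOLS_SORT' -> 'samtools'
    return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
}

def getProcessName(task_process) {
    // e.g. 'NFCORE_RNASEQ:RNASEQ:SAMTOOLS_SORT' -> 'SAMTOOLS_SORT'
    return task_process.tokenize(':')[-1]
}

def example = 'NFCORE_RNASEQ:RNASEQ:SAMTOOLS_SORT'   // hypothetical $task.process value
assert getProcessName(example)  == 'SAMTOOLS_SORT'   // top-level key in versions.yml
assert getSoftwareName(example) == 'samtools'        // nested tool key in versions.yml

// Publishing gate mirrored from the rewritten saveFiles(): versions.yml is only
// published when running under the pytest workflow (NF_CORE_MODULES_TEST set).
def shouldPublish(String filename) {
    return !(filename == 'versions.yml' && !System.getenv('NF_CORE_MODULES_TEST'))
}

[With these helpers, each process emits `path "versions.yml", emit: version` and writes the file via the `cat <<-END_VERSIONS > versions.yml` heredoc shown throughout the diff.]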
diff --git a/modules/samtools/index/functions.nf b/modules/samtools/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/index/functions.nf +++ b/modules/samtools/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index e1966fb3..83802d95 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,12 +24,15 @@ process SAMTOOLS_INDEX { output: tuple val(meta), path("*.bai"), optional:true, emit: bai tuple val(meta), path("*.csi"), optional:true, emit: csi - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ samtools index $options.args $bam - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/index/meta.yml b/modules/samtools/index/meta.yml index 5d076e3b..6f7dc887 100644 --- a/modules/samtools/index/meta.yml +++ b/modules/samtools/index/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@ewels" diff --git a/modules/samtools/merge/functions.nf b/modules/samtools/merge/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/merge/functions.nf +++ b/modules/samtools/merge/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 0182b9fd..85a41926 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process SAMTOOLS_MERGE { output: tuple val(meta), path("${prefix}.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ samtools merge ${prefix}.bam $bams - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/merge/meta.yml b/modules/samtools/merge/meta.yml index 9092f22e..c5f15a14 100644 --- a/modules/samtools/merge/meta.yml +++ b/modules/samtools/merge/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@yuukiiwa " diff --git a/modules/samtools/mpileup/functions.nf b/modules/samtools/mpileup/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/mpileup/functions.nf +++ b/modules/samtools/mpileup/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index f736e9c7..28185934 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process SAMTOOLS_MPILEUP { output: tuple val(meta), path("*.mpileup"), emit: mpileup - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process SAMTOOLS_MPILEUP { --output ${prefix}.mpileup \\ $options.args \\ $bam - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/mpileup/meta.yml b/modules/samtools/mpileup/meta.yml index 7e432a78..aa0ccc6d 100644 --- a/modules/samtools/mpileup/meta.yml +++ b/modules/samtools/mpileup/meta.yml @@ -41,7 +41,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@joseespinosa" diff --git a/modules/samtools/sort/functions.nf b/modules/samtools/sort/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/sort/functions.nf +++ b/modules/samtools/sort/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index 0a6b7048..4c3c4c1f 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process SAMTOOLS_SORT { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ samtools sort $options.args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/sort/meta.yml b/modules/samtools/sort/meta.yml index 704e8c1f..d4f70a8e 100644 --- a/modules/samtools/sort/meta.yml +++ b/modules/samtools/sort/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@ewels" diff --git a/modules/samtools/stats/functions.nf b/modules/samtools/stats/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/stats/functions.nf +++ b/modules/samtools/stats/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index 8c72d725..b1fd325f 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,12 +23,15 @@ process SAMTOOLS_STATS { output: tuple val(meta), path("*.stats"), emit: stats - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ samtools stats $bam > ${bam}.stats - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/stats/meta.yml b/modules/samtools/stats/meta.yml index b549ff5c..1c7dcc8b 100644 --- a/modules/samtools/stats/meta.yml +++ b/modules/samtools/stats/meta.yml @@ -42,6 +42,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/view/functions.nf b/modules/samtools/view/functions.nf index da9da093..85628ee0 100644 --- a/modules/samtools/view/functions.nf +++ b/modules/samtools/view/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
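The two helpers now imported by every module differ only in how much of the process name they keep: both take the last `:`-separated component of `task.process`, but `getSoftwareName` additionally drops everything after the first underscore and lowercases the result, while the new `getProcessName` returns that component unchanged for use as the top-level key in versions.yml. A minimal stand-alone Groovy check, using a made-up fully qualified process name:

    def getSoftwareName(task_process) {
        return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
    }

    def getProcessName(task_process) {
        return task_process.tokenize(':')[-1]
    }

    // Hypothetical qualified name of a module called from a subworkflow
    def example = 'NFCORE_TEST:TEST:SAMTOOLS_STATS'
    assert getProcessName(example)  == 'SAMTOOLS_STATS'   // top-level versions.yml key
    assert getSoftwareName(example) == 'samtools'         // per-tool versions.yml key
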
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index 2ca57032..824b9bab 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process SAMTOOLS_VIEW { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ samtools view $options.args $bam > ${prefix}.bam - echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/samtools/view/meta.yml b/modules/samtools/view/meta.yml index c35a8b03..6388f9bc 100644 --- a/modules/samtools/view/meta.yml +++ b/modules/samtools/view/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@joseespinosa" diff --git a/modules/seacr/callpeak/functions.nf b/modules/seacr/callpeak/functions.nf index da9da093..85628ee0 100644 --- a/modules/seacr/callpeak/functions.nf +++ b/modules/seacr/callpeak/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index 429c45cf..cc567dfb 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process SEACR_CALLPEAK { output: tuple val(meta), path("*.bed"), emit: bed - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process SEACR_CALLPEAK { $options.args \\ $prefix - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/seacr/callpeak/meta.yml b/modules/seacr/callpeak/meta.yml index 579961e2..80da69e4 100644 --- a/modules/seacr/callpeak/meta.yml +++ b/modules/seacr/callpeak/meta.yml @@ -43,6 +43,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@chris-cheshire" diff --git a/modules/seqkit/split2/functions.nf b/modules/seqkit/split2/functions.nf index da9da093..85628ee0 100644 --- a/modules/seqkit/split2/functions.nf +++ b/modules/seqkit/split2/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index 5eeb0ad0..44e0046f 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process SEQKIT_SPLIT2 { output: tuple val(meta), path("*.split/*.gz"), emit: reads - path("*.version.txt") , emit: version + path("versions.yml") , emit: version script: @@ -41,7 +41,10 @@ process SEQKIT_SPLIT2 { -1 ${reads} \ --out-dir ${prefix}.split - echo \$(seqkit --version 2>&1) | sed 's/^.*seqkit //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(seqkit --version 2>&1 | sed 's/^.*seqkit //; s/Using.*\$//') + END_VERSIONS """ } else { """ @@ -53,7 +56,10 @@ process SEQKIT_SPLIT2 { -2 ${reads[1]} \ --out-dir ${prefix}.split - echo \$(seqkit --version 2>&1) | sed 's/^.*seqkit //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(seqkit --version 2>&1 | sed 's/^.*seqkit //; s/Using.*\$//') + END_VERSIONS """ } } diff --git a/modules/seqkit/split2/meta.yml b/modules/seqkit/split2/meta.yml index 44ae4ea7..5dfee7f9 100644 --- a/modules/seqkit/split2/meta.yml +++ b/modules/seqkit/split2/meta.yml @@ -33,6 +33,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@FriederikeHanssen" diff --git a/modules/seqtk/sample/functions.nf b/modules/seqtk/sample/functions.nf index da9da093..85628ee0 100644 --- a/modules/seqtk/sample/functions.nf +++ b/modules/seqtk/sample/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/seqtk/sample/main.nf b/modules/seqtk/sample/main.nf index b50a5a2a..d62d8cac 100644 --- a/modules/seqtk/sample/main.nf +++ b/modules/seqtk/sample/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process SEQTK_SAMPLE { output: tuple val(meta), path("*.fastq.gz"), emit: reads - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,7 +38,10 @@ process SEQTK_SAMPLE { $sample_size \\ | gzip --no-name > ${prefix}.fastq.gz \\ - echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(seqtk 2>&1 | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS """ } else { if 
(!(options.args ==~ /.*-s[0-9]+.*/)) { @@ -59,7 +62,10 @@ process SEQTK_SAMPLE { $sample_size \\ | gzip --no-name > ${prefix}_2.fastq.gz \\ - echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(seqtk 2>&1 | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS """ } } diff --git a/modules/seqtk/sample/meta.yml b/modules/seqtk/sample/meta.yml index f9122936..b9422433 100644 --- a/modules/seqtk/sample/meta.yml +++ b/modules/seqtk/sample/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - reads: type: file description: Subsampled FastQ files, 1 for single-end data or 2 for paired-end data. diff --git a/modules/seqtk/subseq/functions.nf b/modules/seqtk/subseq/functions.nf index da9da093..85628ee0 100644 --- a/modules/seqtk/subseq/functions.nf +++ b/modules/seqtk/subseq/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/seqtk/subseq/main.nf b/modules/seqtk/subseq/main.nf index 810a8a48..2907d282 100644 --- a/modules/seqtk/subseq/main.nf +++ b/modules/seqtk/subseq/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process SEQTK_SUBSEQ { output: path "*.gz" , emit: sequences - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -41,6 +41,9 @@ process SEQTK_SUBSEQ { $filter_list | \\ gzip --no-name > ${sequences}${prefix}.${ext}.gz - echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(seqtk 2>&1 | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/seqtk/subseq/meta.yml b/modules/seqtk/subseq/meta.yml index 0676229d..f7c6c624 100644 --- a/modules/seqtk/subseq/meta.yml +++ b/modules/seqtk/subseq/meta.yml @@ -24,7 +24,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - sequences: type: file description: FASTQ/FASTA file diff --git a/modules/sequenzautils/bam2seqz/functions.nf b/modules/sequenzautils/bam2seqz/functions.nf index da9da093..85628ee0 100755 --- a/modules/sequenzautils/bam2seqz/functions.nf +++ b/modules/sequenzautils/bam2seqz/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/sequenzautils/bam2seqz/main.nf b/modules/sequenzautils/bam2seqz/main.nf index aec19725..ad4f6847 100644 --- a/modules/sequenzautils/bam2seqz/main.nf +++ b/modules/sequenzautils/bam2seqz/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process SEQUENZAUTILS_BAM2SEQZ { output: tuple val(meta), path("*.seqz.gz"), emit: seqz - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -40,6 +40,9 @@ process SEQUENZAUTILS_BAM2SEQZ { -gc $wigfile \\ -o ${prefix}.seqz.gz - echo \$(sequenzautils --version 2>&1) | sed 's/^.*sequenzautils //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(sequenzautils --version 2>&1 | sed 's/^.*sequenzautils //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/sequenzautils/bam2seqz/meta.yml b/modules/sequenzautils/bam2seqz/meta.yml index 171e155c..2ce4ab7f 100755 --- a/modules/sequenzautils/bam2seqz/meta.yml +++ b/modules/sequenzautils/bam2seqz/meta.yml @@ -40,7 +40,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - seqz: type: file description: Seqz file diff --git a/modules/sequenzautils/gcwiggle/functions.nf b/modules/sequenzautils/gcwiggle/functions.nf index da9da093..85628ee0 100755 --- a/modules/sequenzautils/gcwiggle/functions.nf +++ b/modules/sequenzautils/gcwiggle/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to 
initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/sequenzautils/gcwiggle/main.nf b/modules/sequenzautils/gcwiggle/main.nf index fd200b6b..a0575d7e 100644 --- a/modules/sequenzautils/gcwiggle/main.nf +++ b/modules/sequenzautils/gcwiggle/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process SEQUENZAUTILS_GCWIGGLE { output: tuple val(meta), path("*.wig.gz"), emit: wig - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -35,6 +35,9 @@ process SEQUENZAUTILS_GCWIGGLE { --fasta $fasta \\ -o ${prefix}.wig.gz - echo \$(sequenzautils --version 2>&1) | sed 's/^.*sequenzautils //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(sequenzautils --version 2>&1 | sed 's/^.*sequenzautils //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/sequenzautils/gcwiggle/meta.yml b/modules/sequenzautils/gcwiggle/meta.yml index e1cb7b03..35daa498 100644 --- 
a/modules/sequenzautils/gcwiggle/meta.yml +++ b/modules/sequenzautils/gcwiggle/meta.yml @@ -28,7 +28,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - wig: type: file description: GC Wiggle track file diff --git a/modules/seqwish/induce/functions.nf b/modules/seqwish/induce/functions.nf index da9da093..85628ee0 100644 --- a/modules/seqwish/induce/functions.nf +++ b/modules/seqwish/induce/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index ebf714ff..e9b2836b 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process SEQWISH_INDUCE { output: tuple val(meta), path("*.gfa"), emit: gfa - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: @@ -39,6 +39,9 @@ process SEQWISH_INDUCE { --gfa=${prefix}.gfa \\ $options.args - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/seqwish/induce/meta.yml b/modules/seqwish/induce/meta.yml index f357f0df..c2836824 100644 --- a/modules/seqwish/induce/meta.yml +++ b/modules/seqwish/induce/meta.yml @@ -41,6 +41,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/shovill/functions.nf b/modules/shovill/functions.nf index da9da093..85628ee0 100644 --- a/modules/shovill/functions.nf +++ b/modules/shovill/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
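The saveFiles rewrite that recurs in each functions.nf above also inverts the old publishing rule: instead of filtering out any filename ending in `.version.txt`, it now returns null early when the filename is `versions.yml` and the `NF_CORE_MODULES_TEST` environment variable is unset, so the version file is only published when the pytest module-test workflow runs, while normal outputs keep their existing publish_files/publish_by_meta handling. A condensed, hypothetical rephrasing of just that guard:

    // Sketch of the guard only; options handling and publish_by_meta paths are omitted.
    def shouldPublish(String filename) {
        // versions.yml is published only when the module-test workflow exports
        // NF_CORE_MODULES_TEST; every other filename falls through unchanged.
        if (filename == 'versions.yml' && !System.getenv('NF_CORE_MODULES_TEST')) {
            return false
        }
        return true
    }

    assert shouldPublish('sample.sorted.bam')   // ordinary outputs are unaffected
    // shouldPublish('versions.yml') is true only when NF_CORE_MODULES_TEST is set
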
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/shovill/main.nf b/modules/shovill/main.nf index e751b2a8..8319e75f 100644 --- a/modules/shovill/main.nf +++ b/modules/shovill/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process SHOVILL { tuple val(meta), path("shovill.log") , emit: log tuple val(meta), path("{skesa,spades,megahit,velvet}.fasta"), emit: raw_contigs tuple val(meta), path("contigs.{fastg,gfa,LastGraph}") , optional:true, emit: gfa - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process SHOVILL { --outdir ./ \\ --force - echo \$(shovill --version 2>&1) | sed 's/^.*shovill //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(shovill --version 2>&1 | sed 's/^.*shovill //' ) + END_VERSIONS """ } diff --git a/modules/shovill/meta.yml b/modules/shovill/meta.yml index b8f24e34..b878f93d 100644 --- a/modules/shovill/meta.yml +++ b/modules/shovill/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - contigs: type: file description: The final assembly produced by Shovill diff --git a/modules/snpdists/functions.nf b/modules/snpdists/functions.nf index da9da093..85628ee0 100644 --- a/modules/snpdists/functions.nf +++ b/modules/snpdists/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core 
modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/snpdists/main.nf b/modules/snpdists/main.nf index c103bb33..ede94906 100644 --- a/modules/snpdists/main.nf +++ b/modules/snpdists/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process SNPDISTS { output: tuple val(meta), path("*.tsv"), emit: tsv - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process SNPDISTS { $options.args \\ $alignment > ${prefix}.tsv - echo \$(snp-dists -v 2>&1) | sed 's/snp-dists //;' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(snp-dists -v 2>&1 | sed 's/snp-dists //;') + END_VERSIONS """ } diff --git a/modules/snpdists/meta.yml b/modules/snpdists/meta.yml index 590d034a..e86e3092 100644 --- a/modules/snpdists/meta.yml +++ b/modules/snpdists/meta.yml @@ -36,6 +36,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/snpeff/functions.nf 
b/modules/snpeff/functions.nf index da9da093..85628ee0 100644 --- a/modules/snpeff/functions.nf +++ b/modules/snpeff/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index af0fd816..aa25a092 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -31,7 +31,7 @@ process SNPEFF { output: tuple val(meta), path("*.ann.vcf"), emit: vcf path "*.csv" , emit: report - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -52,6 +52,9 @@ process SNPEFF { $vcf \\ > ${prefix}.ann.vcf - echo \$(snpEff -version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(snpEff -version 2>&1) + END_VERSIONS """ } diff --git a/modules/snpeff/meta.yml b/modules/snpeff/meta.yml index 7ba62cde..aa21e2bc 100644 --- a/modules/snpeff/meta.yml +++ b/modules/snpeff/meta.yml @@ -52,6 +52,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/snpsites/functions.nf b/modules/snpsites/functions.nf index da9da093..85628ee0 100644 --- a/modules/snpsites/functions.nf +++ b/modules/snpsites/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/snpsites/main.nf b/modules/snpsites/main.nf index 5104572e..5cc85773 100644 --- a/modules/snpsites/main.nf +++ b/modules/snpsites/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process SNPSITES { output: path "*.fas" , emit: fasta path "*.sites.txt" , emit: constant_sites - path "*.version.txt", emit: version + path "versions.yml" , emit: version env CONSTANT_SITES, emit: constant_sites_string script: @@ -38,6 +38,9 @@ process SNPSITES { CONSTANT_SITES=\$(cat constant.sites.txt) - echo \$(snp-sites -V 2>&1) | sed 's/snp-sites //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(snp-sites -V 2>&1 | sed 's/snp-sites //') + END_VERSIONS """ } diff --git a/modules/snpsites/meta.yml b/modules/snpsites/meta.yml index 0a22b879..ae250e5f 100644 --- a/modules/snpsites/meta.yml +++ b/modules/snpsites/meta.yml @@ -18,7 +18,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - fasta: type: file description: Variant fasta file diff --git a/modules/sortmerna/functions.nf b/modules/sortmerna/functions.nf index da9da093..85628ee0 100644 --- a/modules/sortmerna/functions.nf +++ b/modules/sortmerna/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - 
if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index 32ca4ca9..01975979 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process SORTMERNA { output: tuple val(meta), path("*.fastq.gz"), emit: reads tuple val(meta), path("*.log") , emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -47,7 +47,10 @@ process SORTMERNA { gzip -f < non_rRNA_reads.fq > ${prefix}.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log - echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(sortmerna --version 2>&1 | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + END_VERSIONS """ } else { """ @@ -67,7 +70,10 @@ process SORTMERNA { gzip -f < non_rRNA_reads_rev.fq > ${prefix}_2.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log - echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + 
${getSoftwareName(task.process)}: \$(sortmerna --version 2>&1 | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + END_VERSIONS """ } } diff --git a/modules/spades/functions.nf b/modules/spades/functions.nf index da9da093..85628ee0 100644 --- a/modules/spades/functions.nf +++ b/modules/spades/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/spades/main.nf b/modules/spades/main.nf index e78500f2..a260de54 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -29,7 +29,7 @@ process SPADES { tuple val(meta), path('*.gene_clusters.fa'), optional:true, emit: gene_clusters tuple val(meta), path('*.assembly.gfa') , optional:true, emit: gfa tuple val(meta), path('*.log') , emit: log - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -62,6 +62,9 @@ process SPADES { mv gene_clusters.fasta ${prefix}.gene_clusters.fa fi - echo \$(spades.py --version 2>&1) | sed 's/^.*SPAdes genome assembler v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(spades.py --version 2>&1 | sed 's/^.*SPAdes genome assembler v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/spades/meta.yml b/modules/spades/meta.yml index 5a05e5f3..38c5c2ae 100644 --- a/modules/spades/meta.yml +++ b/modules/spades/meta.yml @@ -62,7 +62,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@JoseEspinosa" diff --git a/modules/staphopiasccmec/functions.nf b/modules/staphopiasccmec/functions.nf index da9da093..85628ee0 100644 --- a/modules/staphopiasccmec/functions.nf +++ b/modules/staphopiasccmec/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/staphopiasccmec/main.nf b/modules/staphopiasccmec/main.nf index 85a61514..0e57128b 100644 --- a/modules/staphopiasccmec/main.nf +++ b/modules/staphopiasccmec/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process STAPHOPIASCCMEC { output: tuple val(meta), path("*.tsv"), emit: tsv - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -31,6 +31,9 @@ process STAPHOPIASCCMEC { """ staphopia-sccmec --assembly $fasta $options.args > ${prefix}.tsv - echo \$(staphopia-sccmec --version 2>&1) | sed 's/^.*staphopia-sccmec //' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(staphopia-sccmec --version 2>&1 | sed 's/^.*staphopia-sccmec //') + END_VERSIONS """ } diff --git a/modules/staphopiasccmec/meta.yml b/modules/staphopiasccmec/meta.yml index 2054c6b3..e1ce3a05 100644 --- a/modules/staphopiasccmec/meta.yml +++ b/modules/staphopiasccmec/meta.yml @@ -34,7 +34,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - tsv: type: file description: Tab-delimited results diff --git a/modules/star/align/functions.nf b/modules/star/align/functions.nf index da9da093..85628ee0 100644 --- a/modules/star/align/functions.nf +++ b/modules/star/align/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise 
default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index d5d88ce8..677d1f2a 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -29,7 +29,7 @@ process STAR_ALIGN { tuple val(meta), path('*Log.final.out') , emit: log_final tuple val(meta), path('*Log.out') , emit: log_out tuple val(meta), path('*Log.progress.out'), emit: log_progress - path '*.version.txt' , emit: version + path "versions.yml" , emit: version tuple val(meta), path('*sortedByCoord.out.bam') , optional:true, emit: bam_sorted tuple val(meta), path('*toTranscriptome.out.bam'), optional:true, emit: bam_transcript @@ -68,6 +68,9 @@ process STAR_ALIGN { gzip ${prefix}.unmapped_2.fastq fi - STAR --version | sed -e "s/STAR_//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + END_VERSIONS """ } diff --git a/modules/star/align/meta.yml b/modules/star/align/meta.yml index a589d145..7f0217ea 100644 
--- a/modules/star/align/meta.yml +++ b/modules/star/align/meta.yml @@ -48,7 +48,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam_sorted: type: file description: Sorted BAM file of read alignments (optional) diff --git a/modules/star/genomegenerate/functions.nf b/modules/star/genomegenerate/functions.nf index da9da093..85628ee0 100644 --- a/modules/star/genomegenerate/functions.nf +++ b/modules/star/genomegenerate/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/star/genomegenerate/main.nf b/modules/star/genomegenerate/main.nf index 9335b9b5..5ccb38e8 100644 --- a/modules/star/genomegenerate/main.nf +++ b/modules/star/genomegenerate/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process STAR_GENOMEGENERATE { output: path "star" , emit: index - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,7 +43,10 @@ process STAR_GENOMEGENERATE { $memory \\ $options.args - STAR --version | sed -e "s/STAR_//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + END_VERSIONS """ } else { """ @@ -61,7 +64,10 @@ process STAR_GENOMEGENERATE { $memory \\ $options.args - STAR --version | sed -e "s/STAR_//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + END_VERSIONS """ } } diff --git a/modules/star/genomegenerate/meta.yml b/modules/star/genomegenerate/meta.yml index aae0219b..70525738 100644 --- a/modules/star/genomegenerate/meta.yml +++ b/modules/star/genomegenerate/meta.yml @@ -29,7 +29,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/strelka/germline/functions.nf b/modules/strelka/germline/functions.nf index da9da093..85628ee0 100644 --- a/modules/strelka/germline/functions.nf +++ b/modules/strelka/germline/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index 48f795b0..d2203fa4 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -29,7 +29,7 @@ process STRELKA_GERMLINE { tuple val(meta), path("*variants.vcf.gz.tbi"), emit: vcf_tbi tuple val(meta), path("*genome.vcf.gz") , emit: genome_vcf tuple val(meta), path("*genome.vcf.gz.tbi") , emit: genome_vcf_tbi - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -49,6 +49,9 @@ process STRELKA_GERMLINE { mv strelka/results/variants/variants.vcf.gz ${prefix}.variants.vcf.gz mv strelka/results/variants/variants.vcf.gz.tbi ${prefix}.variants.vcf.gz.tbi - echo configureStrelkaGermlineWorkflow.py --version &> ${software}.version.txt #2>&1 + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + strelka: \$( configureStrelkaGermlineWorkflow.py --version ) + END_VERSIONS """ } diff --git a/modules/strelka/germline/meta.yml b/modules/strelka/germline/meta.yml index ae0ecb47..4423e437 100644 --- a/modules/strelka/germline/meta.yml +++ b/modules/strelka/germline/meta.yml @@ -58,6 +58,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@arontommi" diff --git a/modules/stringtie/merge/functions.nf b/modules/stringtie/merge/functions.nf index da9da093..85628ee0 100644 --- 
a/modules/stringtie/merge/functions.nf +++ b/modules/stringtie/merge/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/stringtie/merge/main.nf b/modules/stringtie/merge/main.nf index f0820be1..85670a91 100644 --- a/modules/stringtie/merge/main.nf +++ b/modules/stringtie/merge/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process STRINGTIE_MERGE { output: path "stringtie.merged.gtf", emit: gtf - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process STRINGTIE_MERGE { -G $annotation_gtf \\ -o stringtie.merged.gtf - echo \$(stringtie --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(stringtie --version 2>&1) + END_VERSIONS """ } diff --git a/modules/stringtie/stringtie/functions.nf b/modules/stringtie/stringtie/functions.nf index da9da093..85628ee0 100644 --- a/modules/stringtie/stringtie/functions.nf +++ b/modules/stringtie/stringtie/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index 6cff993a..92986dba 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process STRINGTIE { tuple val(meta), path("*.transcripts.gtf"), emit: transcript_gtf tuple val(meta), path("*.abundance.txt") , emit: abundance tuple val(meta), path("*.ballgown") , emit: ballgown - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -51,6 +51,9 @@ process STRINGTIE { -p $task.cpus \\ $options.args - echo \$(stringtie --version 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(stringtie --version 2>&1) + END_VERSIONS """ } diff --git a/modules/stringtie/stringtie/meta.yml b/modules/stringtie/stringtie/meta.yml index 52ec899d..f9363009 100644 --- a/modules/stringtie/stringtie/meta.yml +++ b/modules/stringtie/stringtie/meta.yml @@ -51,6 +51,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/subread/featurecounts/functions.nf b/modules/subread/featurecounts/functions.nf index da9da093..85628ee0 100644 --- a/modules/subread/featurecounts/functions.nf +++ b/modules/subread/featurecounts/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/subread/featurecounts/main.nf b/modules/subread/featurecounts/main.nf index cde14a74..3e2eb765 100644 --- a/modules/subread/featurecounts/main.nf +++ b/modules/subread/featurecounts/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process SUBREAD_FEATURECOUNTS { output: tuple val(meta), path("*featureCounts.txt") , emit: counts tuple val(meta), path("*featureCounts.txt.summary"), emit: summary - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -47,6 +47,9 @@ process SUBREAD_FEATURECOUNTS { -o ${prefix}.featureCounts.txt \\ ${bams.join(' ')} - echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g" > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(featureCounts -v 2>&1 | sed -e "s/featureCounts v//g") + END_VERSIONS """ } diff --git a/modules/subread/featurecounts/meta.yml b/modules/subread/featurecounts/meta.yml index d24f70bd..504d2f48 100644 --- a/modules/subread/featurecounts/meta.yml +++ b/modules/subread/featurecounts/meta.yml @@ -46,7 +46,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@ntoda03" diff --git a/modules/tabix/bgzip/functions.nf b/modules/tabix/bgzip/functions.nf index da9da093..85628ee0 100644 --- a/modules/tabix/bgzip/functions.nf +++ b/modules/tabix/bgzip/functions.nf @@ -9,6 +9,13 
@@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index e9d2e96e..eb95de62 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process TABIX_BGZIP { output: tuple val(meta), path("*.gz"), emit: gz - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -31,6 +31,9 @@ process TABIX_BGZIP { """ bgzip -c $options.args $input > ${prefix}.${input.getExtension()}.gz - echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/(.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(tabix -h 2>&1 | sed 's/^.*Version: //; s/(.*\$//') + END_VERSIONS """ } diff --git a/modules/tabix/bgzip/meta.yml b/modules/tabix/bgzip/meta.yml index 686d72e6..801d98bc 100644 --- a/modules/tabix/bgzip/meta.yml +++ b/modules/tabix/bgzip/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/tabix/bgziptabix/functions.nf b/modules/tabix/bgziptabix/functions.nf index da9da093..85628ee0 100644 --- a/modules/tabix/bgziptabix/functions.nf +++ b/modules/tabix/bgziptabix/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 6cc3322f..7179a97e 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process TABIX_BGZIPTABIX { output: tuple val(meta), path("*.gz"), path("*.tbi"), emit: tbi - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -32,6 +32,9 @@ process TABIX_BGZIPTABIX { bgzip -c $options.args $input > ${prefix}.gz tabix $options.args2 ${prefix}.gz - echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/(.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(tabix -h 2>&1 | sed 's/^.*Version: //; s/(.*\$//') + END_VERSIONS """ } diff --git a/modules/tabix/bgziptabix/meta.yml b/modules/tabix/bgziptabix/meta.yml index 5bef2350..92f62bf3 100644 --- a/modules/tabix/bgziptabix/meta.yml +++ b/modules/tabix/bgziptabix/meta.yml @@ -39,6 +39,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/tabix/tabix/functions.nf b/modules/tabix/tabix/functions.nf index da9da093..85628ee0 100644 --- a/modules/tabix/tabix/functions.nf +++ b/modules/tabix/tabix/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def 
getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index df1e84ee..f703a787 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process TABIX_TABIX { output: tuple val(meta), path("*.tbi"), emit: tbi - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) """ tabix $options.args $tab - echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/(.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(tabix -h 2>&1 | sed 's/^.*Version: //; s/(.*\$//') + END_VERSIONS """ } diff --git a/modules/tabix/tabix/meta.yml b/modules/tabix/tabix/meta.yml index f66270db..1ca58bcf 100644 --- a/modules/tabix/tabix/meta.yml +++ b/modules/tabix/tabix/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/tiddit/sv/functions.nf b/modules/tiddit/sv/functions.nf index 
da9da093..85628ee0 100644 --- a/modules/tiddit/sv/functions.nf +++ b/modules/tiddit/sv/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index bd42f7d1..b7fe1b03 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process TIDDIT_SV { tuple val(meta), path("*.vcf") , emit: vcf tuple val(meta), path("*.ploidy.tab") , emit: ploidy tuple val(meta), path("*.signals.tab"), emit: signals - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -40,6 +40,9 @@ process TIDDIT_SV { $reference \\ -o $prefix - echo \$(tiddit -h 2>&1) | sed 's/^.*Version: //; s/(.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(tiddit -h 2>&1 | sed 's/^.*Version: //; s/(.*\$//') + END_VERSIONS """ } diff --git a/modules/tiddit/sv/meta.yml b/modules/tiddit/sv/meta.yml index f1601f72..2a351766 100644 --- a/modules/tiddit/sv/meta.yml +++ b/modules/tiddit/sv/meta.yml @@ -45,6 +45,6 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/trimgalore/functions.nf b/modules/trimgalore/functions.nf index da9da093..85628ee0 100644 --- a/modules/trimgalore/functions.nf +++ b/modules/trimgalore/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 3c16d66f..c002062a 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process TRIMGALORE { output: tuple val(meta), path("*.fq.gz") , emit: reads tuple val(meta), path("*report.txt"), emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version tuple val(meta), path("*.html"), emit: html optional true tuple val(meta), path("*.zip") , emit: zip optional true @@ -60,7 +60,10 @@ process TRIMGALORE { $c_r1 \\ $tpc_r1 \\ ${prefix}.fastq.gz - echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(trim_galore --version 2>&1 | sed 's/^.*version //; s/Last.*\$//') + END_VERSIONS """ } else { """ @@ -77,7 +80,10 @@ process TRIMGALORE { $tpc_r2 \\ ${prefix}_1.fastq.gz \\ ${prefix}_2.fastq.gz - echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(trim_galore --version 2>&1 | sed 's/^.*version //; s/Last.*\$//') + END_VERSIONS """ } } diff --git a/modules/trimgalore/meta.yml b/modules/trimgalore/meta.yml index 73538707..0c9fc925 100644 --- a/modules/trimgalore/meta.yml +++ b/modules/trimgalore/meta.yml @@ -51,7 +51,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@drpatelh" - "@ewels" diff --git a/modules/ucsc/bed12tobigbed/functions.nf b/modules/ucsc/bed12tobigbed/functions.nf index da9da093..85628ee0 100644 --- 
a/modules/ucsc/bed12tobigbed/functions.nf +++ b/modules/ucsc/bed12tobigbed/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ucsc/bed12tobigbed/main.nf b/modules/ucsc/bed12tobigbed/main.nf index 407379fd..2f9b287b 100644 --- a/modules/ucsc/bed12tobigbed/main.nf +++ b/modules/ucsc/bed12tobigbed/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process UCSC_BED12TOBIGBED { output: tuple val(meta), path("*.bigBed"), emit: bigbed - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process UCSC_BED12TOBIGBED { $sizes \\ ${prefix}.bigBed - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/ucsc/bed12tobigbed/meta.yml b/modules/ucsc/bed12tobigbed/meta.yml index 7541eb2d..9bd2dd46 100755 --- a/modules/ucsc/bed12tobigbed/meta.yml +++ b/modules/ucsc/bed12tobigbed/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bigbed: type: file description: bigBed file diff --git a/modules/ucsc/bedclip/functions.nf b/modules/ucsc/bedclip/functions.nf index da9da093..85628ee0 100755 --- a/modules/ucsc/bedclip/functions.nf +++ b/modules/ucsc/bedclip/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ucsc/bedclip/main.nf b/modules/ucsc/bedclip/main.nf index d1a47554..c001b410 100755 --- a/modules/ucsc/bedclip/main.nf +++ b/modules/ucsc/bedclip/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,7 +26,7 @@ process UCSC_BEDCLIP { output: tuple val(meta), path("*.bedGraph"), emit: bedgraph - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,6 +37,9 @@ process UCSC_BEDCLIP { $sizes \\ ${prefix}.bedGraph - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/ucsc/bedclip/meta.yml b/modules/ucsc/bedclip/meta.yml index f20b4617..b11d2083 100755 --- a/modules/ucsc/bedclip/meta.yml +++ b/modules/ucsc/bedclip/meta.yml @@ -31,7 +31,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bedgraph: type: file description: bedGraph file diff --git a/modules/ucsc/bedgraphtobigwig/functions.nf b/modules/ucsc/bedgraphtobigwig/functions.nf index da9da093..85628ee0 100644 --- a/modules/ucsc/bedgraphtobigwig/functions.nf +++ b/modules/ucsc/bedgraphtobigwig/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if 
(!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ucsc/bedgraphtobigwig/main.nf b/modules/ucsc/bedgraphtobigwig/main.nf index e5fe3b7f..60e046f9 100644 --- a/modules/ucsc/bedgraphtobigwig/main.nf +++ b/modules/ucsc/bedgraphtobigwig/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -26,13 +26,16 @@ process UCSC_BEDGRAPHTOBIGWIG { output: tuple val(meta), path("*.bigWig"), emit: bigwig - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ bedGraphToBigWig $bedgraph $sizes ${prefix}.bigWig - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/ucsc/bedgraphtobigwig/meta.yml b/modules/ucsc/bedgraphtobigwig/meta.yml index 31365f48..ea20604c 100755 --- a/modules/ucsc/bedgraphtobigwig/meta.yml +++ b/modules/ucsc/bedgraphtobigwig/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bigwig: type: file description: bigWig file diff --git a/modules/ucsc/bigwigaverageoverbed/functions.nf b/modules/ucsc/bigwigaverageoverbed/functions.nf index da9da093..85628ee0 100755 --- a/modules/ucsc/bigwigaverageoverbed/functions.nf +++ b/modules/ucsc/bigwigaverageoverbed/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ucsc/bigwigaverageoverbed/main.nf b/modules/ucsc/bigwigaverageoverbed/main.nf index 0aef3fca..adba3c76 100644 --- a/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/modules/ucsc/bigwigaverageoverbed/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process UCSC_BIGWIGAVERAGEOVERBED { output: tuple val(meta), path("*.tab") , emit: tab - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -33,6 +33,9 @@ process UCSC_BIGWIGAVERAGEOVERBED { # there is a bug that bigWigAverageOverBed can not handle ensembl seqlevels style. bigWigAverageOverBed ${options.args} $bigwig $bed ${bed.getSimpleName()}.tab - echo \$(bigWigAverageOverBed 2>&1) | sed 's/bigWigAverageOverBed v//; s/ - Compute.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bigWigAverageOverBed 2>&1 | sed 's/bigWigAverageOverBed v//; s/ - Compute.*\$//') + END_VERSIONS """ } diff --git a/modules/ucsc/bigwigaverageoverbed/meta.yml b/modules/ucsc/bigwigaverageoverbed/meta.yml index 1f007f42..93328df0 100644 --- a/modules/ucsc/bigwigaverageoverbed/meta.yml +++ b/modules/ucsc/bigwigaverageoverbed/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - tab: type: file description: tab file diff --git a/modules/ucsc/wigtobigwig/functions.nf b/modules/ucsc/wigtobigwig/functions.nf index da9da093..85628ee0 100755 --- a/modules/ucsc/wigtobigwig/functions.nf +++ b/modules/ucsc/wigtobigwig/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/ucsc/wigtobigwig/main.nf b/modules/ucsc/wigtobigwig/main.nf index dee2d0bd..945f07c9 100644 --- a/modules/ucsc/wigtobigwig/main.nf +++ b/modules/ucsc/wigtobigwig/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process UCSC_WIGTOBIGWIG { output: path "*.bw" , emit: bw - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process UCSC_WIGTOBIGWIG { $chromsizes \\ ${wig.getSimpleName()}.bw - echo \$(wigToBigWig 2>&1) | sed 's/wigToBigWig v //; s/ - Convert.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(wigToBigWig 2>&1 | sed 's/wigToBigWig v //; s/ - Convert.*\$//') + END_VERSIONS """ } diff --git a/modules/ucsc/wigtobigwig/meta.yml b/modules/ucsc/wigtobigwig/meta.yml index 6ed4b026..102fd8ef 100644 --- a/modules/ucsc/wigtobigwig/meta.yml +++ b/modules/ucsc/wigtobigwig/meta.yml @@ -27,7 +27,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bw: type: file description: bigwig file diff --git a/modules/umitools/dedup/functions.nf b/modules/umitools/dedup/functions.nf index da9da093..85628ee0 100644 --- a/modules/umitools/dedup/functions.nf +++ b/modules/umitools/dedup/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// 
+// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index f30ab164..0f15c86c 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process UMITOOLS_DEDUP { output: tuple val(meta), path("*.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process UMITOOLS_DEDUP { $paired \\ $options.args - echo \$(umi_tools --version 2>&1) | sed 's/^.*UMI-tools version://; s/ *\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + END_VERSIONS """ } diff --git a/modules/umitools/extract/functions.nf 
b/modules/umitools/extract/functions.nf index da9da093..85628ee0 100644 --- a/modules/umitools/extract/functions.nf +++ b/modules/umitools/extract/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/umitools/extract/main.nf b/modules/umitools/extract/main.nf index e5c4e21c..0a5e6636 100644 --- a/modules/umitools/extract/main.nf +++ b/modules/umitools/extract/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process UMITOOLS_EXTRACT { output: tuple val(meta), path("*.fastq.gz"), emit: reads tuple val(meta), path("*.log") , emit: log - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,7 +38,10 @@ process UMITOOLS_EXTRACT { $options.args \\ > ${prefix}.umi_extract.log - echo \$(umi_tools --version 2>&1) | sed 's/^.*UMI-tools version://; s/ *\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + END_VERSIONS """ } else { """ @@ -51,7 +54,10 @@ process UMITOOLS_EXTRACT { $options.args \\ > ${prefix}.umi_extract.log - echo \$(umi_tools --version 2>&1) | sed 's/^.*UMI-tools version://; s/ *\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + END_VERSIONS """ } } diff --git a/modules/unicycler/functions.nf b/modules/unicycler/functions.nf index da9da093..85628ee0 100644 --- a/modules/unicycler/functions.nf +++ b/modules/unicycler/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 320c0f29..2b031c42 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process UNICYCLER { tuple val(meta), path('*.scaffolds.fa'), emit: scaffolds tuple val(meta), path('*.assembly.gfa'), emit: gfa tuple val(meta), path('*.log') , emit: log - path '*.version.txt' , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -42,6 +42,9 @@ process UNICYCLER { mv assembly.gfa ${prefix}.assembly.gfa mv unicycler.log ${prefix}.unicycler.log - echo \$(unicycler --version 2>&1) | sed 's/^.*Unicycler v//; s/ .*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(unicycler --version 2>&1 | sed 's/^.*Unicycler v//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/unicycler/meta.yml b/modules/unicycler/meta.yml index 286b7f67..f6581919 100644 --- a/modules/unicycler/meta.yml +++ b/modules/unicycler/meta.yml @@ -33,7 +33,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - scaffolds: type: file description: Fasta file containing scaffolds @@ -49,7 +49,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@JoseEspinosa" - "@drpatelh" diff --git a/modules/untar/functions.nf b/modules/untar/functions.nf index da9da093..85628ee0 100644 --- a/modules/untar/functions.nf +++ b/modules/untar/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/untar/main.nf b/modules/untar/main.nf index fc6d7ec5..25b39904 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,13 +23,16 @@ process UNTAR { output: path "$untar" , emit: untar - path "*.version.txt", emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) untar = archive.toString() - '.tar.gz' """ tar -xzvf $options.args $archive - echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(tar --version 2>&1 | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') + END_VERSIONS """ } diff --git a/modules/untar/meta.yml b/modules/untar/meta.yml 
index af4674f0..0dc38292 100644 --- a/modules/untar/meta.yml +++ b/modules/untar/meta.yml @@ -21,7 +21,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/unzip/functions.nf b/modules/unzip/functions.nf index da9da093..85628ee0 100644 --- a/modules/unzip/functions.nf +++ b/modules/unzip/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf index b52fbb04..a2fe2594 100644 --- a/modules/unzip/main.nf +++ b/modules/unzip/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process UNZIP { output: path "${archive.baseName}/" , emit: unzipped_archive - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,6 +38,9 @@ process UNZIP { $options.args \\ $archive - echo \$(7za --help) | grep Version | sed 's/.*p7zip Version//; s/(.*//' 1> ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + 7za: \$(7za --help | grep Version | sed 's/.*p7zip Version//; s/(.*//') + END_VERSIONS """ } diff --git a/modules/unzip/meta.yml b/modules/unzip/meta.yml index 97b1f1fc..386ca8bb 100644 --- a/modules/unzip/meta.yml +++ b/modules/unzip/meta.yml @@ -21,7 +21,7 @@ output: - version: type: file description: File or directory of decompressed archive - pattern: "*.{version.txt}" + pattern: "versions.yml" - unzipped_archive: type: directory description: Directory contents of the unzipped archive diff --git a/modules/variantbam/functions.nf b/modules/variantbam/functions.nf index da9da093..85628ee0 100644 --- a/modules/variantbam/functions.nf +++ b/modules/variantbam/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ?
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/variantbam/main.nf b/modules/variantbam/main.nf index dc29de58..c4ac3742 100644 --- a/modules/variantbam/main.nf +++ b/modules/variantbam/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -25,7 +25,7 @@ process VARIANTBAM { output: tuple val(meta), path("*.bam") , emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -36,6 +36,9 @@ process VARIANTBAM { -o ${prefix}.bam \\ $options.args - echo $VERSION > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/variantbam/meta.yml b/modules/variantbam/meta.yml index da0ff5e0..62ddb578 100644 --- a/modules/variantbam/meta.yml +++ b/modules/variantbam/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: Filtered or downsampled BAM file diff --git a/modules/vcftools/functions.nf b/modules/vcftools/functions.nf index da9da093..85628ee0 100644 --- a/modules/vcftools/functions.nf +++ b/modules/vcftools/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index a0d8cd5a..7ae1619f 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,7 +28,7 @@ process VCFTOOLS { path(diff_variant_file) output: - path("*.version.txt"), emit: version + path("versions.yml") , emit: version tuple val(meta), path("*.vcf"), optional:true, emit: vcf tuple val(meta), path("*.bcf"), optional:true, emit: bcf @@ -124,6 +124,9 @@ process VCFTOOLS { $bed_arg \\ $diff_variant_arg \\ - echo \$(vcftools --version 2>&1) | sed 's/^.*vcftools //; s/Using.*\$//' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(vcftools --version 2>&1 | sed 's/^.*vcftools //; s/Using.*\$//') + END_VERSIONS """ } diff --git a/modules/vcftools/meta.yml b/modules/vcftools/meta.yml index 4da9b6c2..e39a0347 100644 --- a/modules/vcftools/meta.yml +++ b/modules/vcftools/meta.yml @@ -36,7 +36,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - vcf: type: file description: vcf file (optional) diff --git a/modules/yara/index/functions.nf b/modules/yara/index/functions.nf index da9da093..85628ee0 100644 --- a/modules/yara/index/functions.nf +++ 
b/modules/yara/index/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? 
path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/yara/index/main.nf b/modules/yara/index/main.nf index f1fe13a5..c621e866 100644 --- a/modules/yara/index/main.nf +++ b/modules/yara/index/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,7 +23,7 @@ process YARA_INDEX { output: path "yara", emit: index - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,6 +34,9 @@ process YARA_INDEX { mv *.{lf,rid,sa,txt}.* yara cp $fasta yara/yara.fasta - echo \$(yara_indexer --help 2>&1) | grep -e "yara_indexer version:" | sed 's/yara_indexer version: //g' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(yara_indexer --help 2>&1 | grep -e "yara_indexer version:" | sed 's/yara_indexer version: //g') + END_VERSIONS """ } diff --git a/modules/yara/index/meta.yml b/modules/yara/index/meta.yml index f1e2ab93..acf70f2b 100644 --- a/modules/yara/index/meta.yml +++ b/modules/yara/index/meta.yml @@ -24,7 +24,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - index: type: file description: YARA genome index files diff --git a/modules/yara/mapper/functions.nf b/modules/yara/mapper/functions.nf index da9da093..85628ee0 100644 --- a/modules/yara/mapper/functions.nf +++ b/modules/yara/mapper/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index f888ae14..3404d591 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,7 +24,7 @@ process YARA_MAPPER { output: tuple val(meta), path("*.mapped.bam"), emit: bam - path "*.version.txt" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -34,14 +34,20 @@ process YARA_MAPPER { """ yara_mapper $options.args -t ${task.cpus} -f bam ${index}/yara $reads | samtools view -@ ${task.cpus} -hb -F4 > ${prefix}.mapped.bam - echo \$(yara_mapper --help 2>&1) > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(yara_mapper --help 2>&1) + END_VERSIONS """ } else { """ yara_mapper $options.args -t ${task.cpus} -f bam ${index}/yara ${reads[0]} ${reads[1]} > output.bam samtools view -@ ${task.cpus} -hF 4 -f 0x40 -b output.bam > ${prefix}_1.mapped.bam samtools view -@ ${task.cpus} -hF 4 -f 0x80 -b output.bam > ${prefix}_2.mapped.bam - echo \$(yara_mapper --version 2>&1) | grep -e "yara_mapper version:" | sed 's/yara_mapper version: //g' > ${software}.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(yara_mapper --version 2>&1 | grep -e "yara_mapper version:" | sed 's/yara_mapper version: //g') + END_VERSIONS """ } diff --git a/modules/yara/mapper/meta.yml b/modules/yara/mapper/meta.yml index 4a8e6494..4beb0c78 100644 --- a/modules/yara/mapper/meta.yml +++ b/modules/yara/mapper/meta.yml @@ -37,7 +37,7 @@ output: - version: type: file description: File containing software version - pattern: "*.{version.txt}" + pattern: "versions.yml" - bam: type: file description: Sorted BAM file 
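The diffs above add the same pair of helpers to every module's functions.nf: the existing getSoftwareName() and the new getProcessName(). A minimal sketch of how the two differ, runnable as plain Groovy outside Nextflow; the fully qualified process name used below is hypothetical and only for illustration:

    // Helpers copied verbatim from the functions.nf diffs above.
    def getSoftwareName(task_process) {
        // Tool name: last component of the process name, up to the first '_', lower-cased.
        return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
    }

    def getProcessName(task_process) {
        // Module name: last component of the fully qualified process name, kept as-is.
        return task_process.tokenize(':')[-1]
    }

    // Hypothetical fully qualified name, as Nextflow would report it in task.process.
    def example = 'NFCORE_TEST:TEST:UCSC_BEDCLIP'
    assert getProcessName(example)  == 'UCSC_BEDCLIP'
    assert getSoftwareName(example) == 'ucsc'

getProcessName() supplies the top-level key of the versions.yml written by each cat <<-END_VERSIONS heredoc, while getSoftwareName() keys the individual tool version beneath it, so an emitted file would look roughly like (the version string depends on the module):

    UCSC_BEDCLIP:
        ucsc: <tool version string>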
From 4ec8b025bdb436e138230ceda06bcff94585dc01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Mon, 27 Sep 2021 16:14:35 +0100 Subject: [PATCH 095/314] New module: `LIMA` (#719) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: Add module lima * 👌 IMPROVE: Move .pbi output to reports channel * 🐛 FIX: Fix report channel definition * 👌IMPROVE; Remove options from command line update test script with removed options * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 🐛 FIX: Add pbi input * 👌 IMPROVE: Add parallelization to lima * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The accept one channel (primers move into the first channel) * 👌 IMPROVE: Assign a value channel for pimers Improve code workflow readability * 👌 IMPROVE: Update .gitignore * 👌 IMPROVE: Update module to last template version * 🐛 FIX: Correct Singularity and Docker URL * 👌 IMPROVE: Update to the last version of modules template * 👌 IMPROVE: Update test_data.config * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 🐛 FIX: Fill contains args * 📦 NEW: Add module lima * 👌 IMPROVE: Move .pbi output to reports channel * 🐛 FIX: Fix report channel definition * 👌IMPROVE; Remove options from command line update test script with removed options * 🐛 FIX: Add pbi input * 👌 IMPROVE: Add parallelization to lima * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The accept one channel (primers move into the first channel) * 👌 IMPROVE: Assign a value channel for pimers Improve code workflow readability * 👌 IMPROVE: Update .gitignore * 👌 IMPROVE: Update module to last template version * 🐛 FIX: Correct Singularity and Docker URL * 👌 IMPROVE: Update to the last version of modules template * 👌 IMPROVE: Update test_data.config * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 🐛 FIX: Fill contains args * 👌 IMPROVE: Add channel for each output * 👌 IMPROVE: Remove comments * 📦 NEW: Add module lima * 👌 IMPROVE: Move .pbi output to reports channel * 🐛 FIX: Fix report channel definition * 👌IMPROVE; Remove options from command line update test script with removed options * 🐛 FIX: Add pbi input * 👌 IMPROVE: Add parallelization to lima * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The accept one channel (primers move into the first channel) * 👌 IMPROVE: Assign a value channel for pimers Improve code workflow readability * 👌 IMPROVE: Update module to last template version * 🐛 FIX: Correct Singularity and Docker URL * 👌 IMPROVE: Update to the last version of modules template * 👌 IMPROVE: Update test_data.config * 👌 IMPROVE: Remove pbi from input files * 🐛 FIX: Fill contains args * 📦 NEW: Add module lima * 👌 IMPROVE: Move .pbi output to reports channel * 🐛 FIX: Fix report channel definition * 👌IMPROVE; Remove options from command line update test script with removed options * 🐛 FIX: Add pbi input * 👌 IMPROVE: Add parallelization to lima * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to 
test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The accept one channel (primers move into the first channel) * 👌 IMPROVE: Assign a value channel for pimers Improve code workflow readability * 👌 IMPROVE: Update module to last template version * 🐛 FIX: Correct Singularity and Docker URL * 👌 IMPROVE: Update to the last version of modules template * 👌 IMPROVE: Update test_data.config * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 🐛 FIX: Fill contains args * 👌 IMPROVE: Add channel for each output * 👌 IMPROVE: Remove comments * 🐛 FIX: Clean test_data.config * Update modules/lima/main.nf Add meta to each output Co-authored-by: Harshil Patel * Update modules/lima/main.nf Remove useless parenthesis Co-authored-by: Harshil Patel * 🐛 FIX: Keep version number only * 🐛 FIX: Reintegrate prefix variable and use it to define output file name * 👌 IMPROVE: add suffix arg to check output files names * 👌 IMPROVE: Use prefix for output filename * 🐛 FIX: Set optional output Allow usage of different input formats * 👌 IMPROVE: Update meta file * 👌 IMPROVE: Update test One test for each input file type * 👌 IMPROVE: add fasta, fastq.gz, fastq, fastq.gz test files * 👌 IMPROVE: Update with last templates / Follow new version.yaml rule * 🐛 FIX: Fix typos and include getProcessName function * 👌 IMPROVE: Update .gitignore * 👌 IMPROVE: Using suffix to manage output was not a my best idea Add a bash code to detect extension and update output file name * 👌 IMPROVE: clean code Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm Co-authored-by: Mahesh Binzer-Panchal --- .gitignore | 2 + modules/lima/functions.nf | 78 ++++++++++++++++++++++++++++ modules/lima/main.nf | 73 ++++++++++++++++++++++++++ modules/lima/meta.yml | 77 ++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 5 ++ tests/modules/lima/main.nf | 60 ++++++++++++++++++++++ tests/modules/lima/test.yml | 91 +++++++++++++++++++++++++++++++++ 8 files changed, 390 insertions(+) create mode 100644 modules/lima/functions.nf create mode 100644 modules/lima/main.nf create mode 100644 modules/lima/meta.yml create mode 100644 tests/modules/lima/main.nf create mode 100644 tests/modules/lima/test.yml diff --git a/.gitignore b/.gitignore index 9d982e3f..06eae014 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,5 @@ __pycache__ *.pyo *.pyc tests/data/ +modules/modtest/ +tests/modules/modtest/ diff --git a/modules/lima/functions.nf b/modules/lima/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/lima/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + 
options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/lima/main.nf b/modules/lima/main.nf new file mode 100644 index 00000000..1ff5ac48 --- /dev/null +++ b/modules/lima/main.nf @@ -0,0 +1,73 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process LIMA { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::lima=2.2.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/lima:2.2.0--h9ee0642_0" + } else { + container "quay.io/biocontainers/lima:2.2.0--h9ee0642_0" + } + + input: + tuple val(meta), path(ccs) + path primers + + output: + tuple val(meta), path("*.clips") , emit: clips + tuple val(meta), path("*.counts") , emit: counts + tuple val(meta), path("*.guess") , emit: guess + tuple val(meta), path("*.report") , emit: report + tuple val(meta), path("*.summary"), emit: summary + path "versions.yml" , emit: version + + tuple val(meta), path("*.bam") , optional: true, emit: bam + tuple val(meta), path("*.bam.pbi") , optional: true, emit: pbi + tuple val(meta), path("*.{fa, fasta}") , optional: true, emit: fasta + tuple val(meta), path("*.{fa.gz, fasta.gz}"), optional: true, emit: fastagz + tuple val(meta), path("*.fastq") , optional: true, emit: fastq + tuple val(meta), path("*.fastq.gz") , optional: true, emit: fastqgz + tuple val(meta), path("*.xml") , optional: true, emit: xml + tuple val(meta), path("*.json") , optional: true, emit: json + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + + """ + OUT_EXT="" + + if [[ $ccs =~ bam\$ ]]; then + OUT_EXT="bam" + elif [[ $ccs =~ fasta\$ ]]; then + OUT_EXT="fasta" + elif [[ $ccs =~ fasta.gz\$ ]]; then + OUT_EXT="fasta.gz" + elif [[ $ccs =~ fastq\$ ]]; then + OUT_EXT="fastq" + elif [[ $ccs =~ fastq.gz\$ ]]; then + OUT_EXT="fastq.gz" + fi + + echo \$OUT_EXT + lima \\ + $ccs \\ + $primers \\ + $prefix.\$OUT_EXT \\ + -j $task.cpus \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + lima: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) + END_VERSIONS + """ +} diff --git a/modules/lima/meta.yml b/modules/lima/meta.yml new file mode 100644 index 00000000..3bb861b5 --- /dev/null +++ b/modules/lima/meta.yml @@ -0,0 +1,77 @@ +name: lima +description: lima - The PacBio Barcode Demultiplexer and Primer Remover +keywords: + - sort +tools: + - lima: + description: lima - The PacBio Barcode Demultiplexer and Primer Remover + homepage: https://github.com/PacificBiosciences/pbbioconda + documentation: https://lima.how/ + tool_dev_url: https://github.com/pacificbiosciences/barcoding/ + doi: "" + licence: ['BSD-3-clause-Clear'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - ccs: + type: file + description: A BAM or fasta or fasta.gz or fastq or fastq.gz file of subreads or ccs + pattern: "*.{bam,fasta,fasta.gz,fastq,fastq.gz}" + - primers: + type: file + description: Fasta file, sequences of primers + pattern: "*.fasta" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - bam: + type: file + description: A bam file of ccs purged of primers + pattern: "*.bam" + - pbi: + type: file + description: Pacbio index file of ccs purged of primers + pattern: "*.bam" + - xml: + type: file + description: An XML file representing a set of a particular sequence data type such as subreads, references or aligned subreads. + pattern: "*.xml" + - json: + type: file + description: A metadata json file + pattern: "*.json" + - clips: + type: file + description: A fasta file of clipped primers + pattern: "*.clips" + - counts: + type: file + description: A tabulated file of describing pairs of primers + pattern: "*.counts" + - guess: + type: file + description: A second tabulated file of describing pairs of primers (no doc available) + pattern: "*.guess" + - report: + type: file + description: A tab-separated file about each ZMW, unfiltered + pattern: "*.report" + - summary: + type: file + description: This file shows how many ZMWs have been filtered, how ZMWs many are same/different, and how many reads have been filtered. 
+ pattern: "*.summary" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 74673511..16d4790d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -562,6 +562,10 @@ last/train: - modules/last/train/** - tests/modules/last/train/** +lima: + - modules/lima/** + - tests/modules/lima/** + lofreq/call: - modules/lofreq/call/** - tests/modules/lofreq/call/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index eda747e0..8b246c7c 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -175,6 +175,11 @@ params { alz = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.bam" alzpbi = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.bam.pbi" ccs = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.bam" + ccs_fa = "${test_data_dir}/genomics/homo_sapiens/pacbio/fasta/alz.ccs.fasta" + ccs_fa_gz = "${test_data_dir}/genomics/homo_sapiens/pacbio/fasta/alz.ccs.fasta.gz" + ccs_fq = "${test_data_dir}/genomics/homo_sapiens/pacbio/fastq/alz.ccs.fastq" + ccs_fq_gz = "${test_data_dir}/genomics/homo_sapiens/pacbio/fastq/alz.ccs.fastq.gz" + ccs_xml = "${test_data_dir}/genomics/homo_sapiens/pacbio/xml/alz.ccs.consensusreadset.xml" lima = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.bam" refine = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.bam" cluster = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.bam" diff --git a/tests/modules/lima/main.nf b/tests/modules/lima/main.nf new file mode 100644 index 00000000..df4b2be2 --- /dev/null +++ b/tests/modules/lima/main.nf @@ -0,0 +1,60 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { LIMA } from '../../../modules/lima/main.nf' addParams( options: [args: '--isoseq --peek-guess', suffix: ".fl"] ) + +workflow test_lima_bam { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['ccs'], checkIfExists: true), + ] + primers = [ file(params.test_data['homo_sapiens']['pacbio']['primers'], checkIfExists: true) ] + + LIMA ( input, primers ) +} + +workflow test_lima_fa { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['ccs_fa'], checkIfExists: true), + ] + primers = [ file(params.test_data['homo_sapiens']['pacbio']['primers'], checkIfExists: true) ] + + LIMA ( input, primers ) +} + +workflow test_lima_fa_gz { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['ccs_fa_gz'], checkIfExists: true), + ] + primers = [ file(params.test_data['homo_sapiens']['pacbio']['primers'], checkIfExists: true) ] + + LIMA ( input, primers ) +} + +workflow test_lima_fq { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['ccs_fq'], checkIfExists: true), + ] + primers = [ file(params.test_data['homo_sapiens']['pacbio']['primers'], checkIfExists: true) ] + + LIMA ( input, primers ) +} + +workflow test_lima_fq_gz { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['ccs_fq_gz'], checkIfExists: true), + ] + primers = [ file(params.test_data['homo_sapiens']['pacbio']['primers'], checkIfExists: true) ] + + LIMA ( input, primers ) +} diff --git a/tests/modules/lima/test.yml 
b/tests/modules/lima/test.yml new file mode 100644 index 00000000..1ff860d9 --- /dev/null +++ b/tests/modules/lima/test.yml @@ -0,0 +1,91 @@ +- name: lima test_lima_bam + command: nextflow run tests/modules/lima -entry test_lima_bam -c tests/config/nextflow.config + tags: + - lima + files: + - path: output/lima/test.fl.NEB_5p--NEB_Clontech_3p.bam + md5sum: 14b51d7f44e30c05a5b14e431a992097 + - path: output/lima/test.fl.NEB_5p--NEB_Clontech_3p.bam.pbi + md5sum: 6ae7f057304ad17dd9d5f565d72d3f7b + - path: output/lima/test.fl.NEB_5p--NEB_Clontech_3p.consensusreadset.xml + contains: [ 'ConsensusReadSet' ] + - path: output/lima/test.fl.json + contains: [ 'ConsensusReadSet' ] + - path: output/lima/test.fl.lima.clips + md5sum: fa03bc75bd78b2648a139fd67c69208f + - path: output/lima/test.fl.lima.counts + md5sum: 842c6a23ca2de504ced4538ad5111da1 + - path: output/lima/test.fl.lima.guess + md5sum: d3675af3ca8a908ee9e3c231668392d3 + - path: output/lima/test.fl.lima.report + md5sum: dc073985322ae0a003ccc7e0fa4db5e6 + - path: output/lima/test.fl.lima.summary + md5sum: bcbcaaaca418bdeb91141c81715ca420 + +- name: lima test_lima_fa + command: nextflow run tests/modules/lima -entry test_lima_fa -c tests/config/nextflow.config + tags: + - lima + files: + - path: output/lima/test.fl.lima.clips + md5sum: 1012bc8874a14836f291bac48e8482a4 + - path: output/lima/test.fl.lima.counts + md5sum: a4ceaa408be334eaa711577e95f8730e + - path: output/lima/test.fl.lima.guess + md5sum: 651e5f2b438b8ceadb3e06a2177e1818 + - path: output/lima/test.fl.lima.report + md5sum: bd4a8bde17471563cf91aab4c787911d + - path: output/lima/test.fl.lima.summary + md5sum: 03be2311ba4afb878d8e547ab38c11eb + +- name: lima test_lima_fa_gz + command: nextflow run tests/modules/lima -entry test_lima_fa_gz -c tests/config/nextflow.config + tags: + - lima + files: + - path: output/lima/test.fl.lima.clips + md5sum: 1012bc8874a14836f291bac48e8482a4 + - path: output/lima/test.fl.lima.counts + md5sum: a4ceaa408be334eaa711577e95f8730e + - path: output/lima/test.fl.lima.guess + md5sum: 651e5f2b438b8ceadb3e06a2177e1818 + - path: output/lima/test.fl.lima.report + md5sum: bd4a8bde17471563cf91aab4c787911d + - path: output/lima/test.fl.lima.summary + md5sum: 03be2311ba4afb878d8e547ab38c11eb + +- name: lima test_lima_fq + command: nextflow run tests/modules/lima -entry test_lima_fq -c tests/config/nextflow.config + tags: + - lima + files: + - path: output/lima/test.fl.NEB_5p--NEB_Clontech_3p.fastq + md5sum: ef395f689c5566f501e300bb83d7a5f2 + - path: output/lima/test.fl.lima.clips + md5sum: 5c16ef8122f6f1798acc30eb8a30828c + - path: output/lima/test.fl.lima.counts + md5sum: 767b687e6eda7b24cd0e577f527eb2f0 + - path: output/lima/test.fl.lima.guess + md5sum: 31b988aab6bda84867e704b9edd8a763 + - path: output/lima/test.fl.lima.report + md5sum: ad2a9b1eeb4cda4a1f69ef4b7520b5fd + - path: output/lima/test.fl.lima.summary + md5sum: e91d3c386aaf4effa63f33ee2eb7da2a + +- name: lima test_lima_fq_gz + command: nextflow run tests/modules/lima -entry test_lima_fq_gz -c tests/config/nextflow.config + tags: + - lima + files: + - path: output/lima/test.fl.NEB_5p--NEB_Clontech_3p.fastq.gz + md5sum: 32c11db85f69a1b4454b6bbd794b6df2 + - path: output/lima/test.fl.lima.clips + md5sum: 5c16ef8122f6f1798acc30eb8a30828c + - path: output/lima/test.fl.lima.counts + md5sum: 767b687e6eda7b24cd0e577f527eb2f0 + - path: output/lima/test.fl.lima.guess + md5sum: 31b988aab6bda84867e704b9edd8a763 + - path: output/lima/test.fl.lima.report + md5sum: ad2a9b1eeb4cda4a1f69ef4b7520b5fd + - path: 
output/lima/test.fl.lima.summary + md5sum: e91d3c386aaf4effa63f33ee2eb7da2a From 43c27792583b3db3faedf189adea48939e300cf2 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Mon, 27 Sep 2021 22:10:37 +0100 Subject: [PATCH 096/314] Fix version commands (#749) * Fix version commands * Fix version commands: again --- modules/abacas/main.nf | 2 +- modules/agrvate/main.nf | 2 +- modules/bandage/image/main.nf | 2 +- modules/bismark/align/main.nf | 2 +- modules/bismark/deduplicate/main.nf | 2 +- modules/bismark/genomepreparation/main.nf | 2 +- modules/bismark/methylationextractor/main.nf | 2 +- modules/bismark/report/main.nf | 2 +- modules/bowtie/align/main.nf | 2 +- modules/bowtie/build/main.nf | 2 +- modules/bowtie2/align/main.nf | 8 ++--- modules/bowtie2/build/main.nf | 2 +- modules/bwa/aln/main.nf | 6 ++-- modules/bwa/index/main.nf | 13 +++++--- modules/bwa/mem/main.nf | 4 +-- modules/bwa/sampe/main.nf | 2 +- modules/bwa/samse/main.nf | 2 +- modules/bwamem2/index/main.nf | 8 +++-- modules/bwamem2/mem/main.nf | 7 +++-- modules/bwameth/align/main.nf | 2 +- modules/bwameth/index/main.nf | 2 +- modules/chromap/chromap/main.nf | 8 +++-- modules/chromap/index/main.nf | 1 + modules/gatk4/applybqsr/main.nf | 2 +- modules/gatk4/baserecalibrator/main.nf | 2 +- modules/gatk4/bedtointervallist/main.nf | 2 +- .../gatk4/createsequencedictionary/main.nf | 2 +- modules/gatk4/fastqtosam/main.nf | 2 +- modules/gatk4/getpileupsummaries/main.nf | 2 +- modules/gatk4/haplotypecaller/main.nf | 2 +- modules/gatk4/intervallisttools/main.nf | 2 +- modules/gatk4/markduplicates/main.nf | 2 +- modules/gatk4/mergebamalignment/main.nf | 2 +- modules/gatk4/mergevcfs/main.nf | 2 +- modules/gatk4/mutect2/main.nf | 2 +- modules/gatk4/revertsam/main.nf | 2 +- modules/gatk4/samtofastq/main.nf | 2 +- modules/gatk4/splitncigarreads/main.nf | 2 +- modules/gatk4/variantfiltration/main.nf | 2 +- modules/salmon/index/main.nf | 2 +- modules/salmon/quant/main.nf | 2 +- modules/samtools/ampliconclip/main.nf | 2 +- modules/samtools/faidx/main.nf | 2 +- modules/samtools/fastq/main.nf | 2 +- modules/samtools/flagstat/main.nf | 2 +- modules/samtools/idxstats/main.nf | 2 +- modules/samtools/index/main.nf | 2 +- modules/samtools/merge/main.nf | 2 +- modules/samtools/mpileup/main.nf | 2 +- modules/samtools/sort/main.nf | 2 +- modules/samtools/stats/main.nf | 2 +- modules/samtools/view/main.nf | 2 +- modules/trimgalore/main.nf | 6 ++-- tests/modules/bandage/image/main.nf | 8 ++--- tests/modules/bbmap/align/main.nf | 14 ++++----- tests/modules/bismark/align/test.yml | 4 +-- tests/modules/bismark/deduplicate/test.yml | 2 +- .../bismark/genomepreparation/test.yml | 2 +- .../bismark/methylationextractor/test.yml | 2 +- tests/modules/bismark/report/test.yml | 2 +- tests/modules/bismark/summary/test.yml | 2 +- tests/modules/bowtie/build_test/main.nf | 2 +- tests/modules/bowtie/build_test/test.yml | 12 ++++---- tests/modules/bowtie2/build_test/main.nf | 2 +- tests/modules/bowtie2/build_test/test.yml | 12 ++++---- tests/modules/bwa/index/main.nf | 2 +- tests/modules/bwa/index/test.yml | 10 +++---- tests/modules/bwamem2/index/main.nf | 2 +- tests/modules/bwamem2/index/test.yml | 10 +++---- tests/modules/bwameth/align/test.yml | 4 +-- tests/modules/bwameth/index/main.nf | 2 +- tests/modules/bwameth/index/test.yml | 14 ++++----- tests/modules/methyldackel/extract/test.yml | 2 +- tests/modules/methyldackel/mbias/test.yml | 2 +- tests/modules/minia/test.yml | 2 +- tests/modules/qualimap/bamqc/test.yml | 2 +- tests/modules/salmon/index/main.nf | 4 +-- 
tests/modules/salmon/index/test.yml | 30 +++++++++---------- tests/modules/samtools/faidx/test.yml | 2 +- 79 files changed, 159 insertions(+), 142 deletions(-) diff --git a/modules/abacas/main.nf b/modules/abacas/main.nf index 0e46f854..307e17d2 100644 --- a/modules/abacas/main.nf +++ b/modules/abacas/main.nf @@ -42,7 +42,7 @@ process ABACAS { mv unused_contigs.out ${prefix}.abacas.unused.contigs.out cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(abacas.pl -v 2>&1 | sed 's/^.*ABACAS.//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(abacas.pl -v 2>&1) | sed 's/^.*ABACAS.//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index 44ec0825..3ca2e0f4 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -36,7 +36,7 @@ process AGRVATE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(agrvate -v 2>&1 | sed 's/agrvate //;') + ${getSoftwareName(task.process)}: \$(echo \$(agrvate -v 2>&1) | sed 's/agrvate v//;') END_VERSIONS """ } diff --git a/modules/bandage/image/main.nf b/modules/bandage/image/main.nf index c788e2e1..d15d4826 100644 --- a/modules/bandage/image/main.nf +++ b/modules/bandage/image/main.nf @@ -35,7 +35,7 @@ process BANDAGE_IMAGE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(Bandage --version 2>&1 | sed 's/^.*Version: //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(Bandage --version 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bismark/align/main.nf b/modules/bismark/align/main.nf index 00510272..ce042933 100644 --- a/modules/bismark/align/main.nf +++ b/modules/bismark/align/main.nf @@ -41,7 +41,7 @@ process BISMARK_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/deduplicate/main.nf b/modules/bismark/deduplicate/main.nf index 6e3219f0..8555563d 100644 --- a/modules/bismark/deduplicate/main.nf +++ b/modules/bismark/deduplicate/main.nf @@ -38,7 +38,7 @@ process BISMARK_DEDUPLICATE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/genomepreparation/main.nf b/modules/bismark/genomepreparation/main.nf index 029804d9..0a3fae14 100644 --- a/modules/bismark/genomepreparation/main.nf +++ b/modules/bismark/genomepreparation/main.nf @@ -34,7 +34,7 @@ process BISMARK_GENOMEPREPARATION { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/methylationextractor/main.nf b/modules/bismark/methylationextractor/main.nf index 5968d38f..bafeaad6 100644 --- a/modules/bismark/methylationextractor/main.nf +++ 
b/modules/bismark/methylationextractor/main.nf @@ -45,7 +45,7 @@ process BISMARK_METHYLATIONEXTRACTOR { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/report/main.nf b/modules/bismark/report/main.nf index 8148b061..d7ab3e01 100644 --- a/modules/bismark/report/main.nf +++ b/modules/bismark/report/main.nf @@ -32,7 +32,7 @@ process BISMARK_REPORT { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 3357a592..73554fa2 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -57,7 +57,7 @@ process BOWTIE_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bowtie --version 2>&1 | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie/build/main.nf b/modules/bowtie/build/main.nf index 382e6717..3ae07729 100644 --- a/modules/bowtie/build/main.nf +++ b/modules/bowtie/build/main.nf @@ -32,7 +32,7 @@ process BOWTIE_BUILD { bowtie-build --threads $task.cpus $fasta bowtie/${fasta.baseName} cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bowtie --version 2>&1 | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index e1657a8f..7d33da03 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -39,7 +39,7 @@ process BOWTIE2_ALIGN { bowtie2 \\ -x \$INDEX \\ -U $reads \\ - --threads ${split_cpus} \\ + --threads $split_cpus \\ $unaligned \\ $options.args \\ 2> ${prefix}.bowtie2.log \\ @@ -47,7 +47,7 @@ process BOWTIE2_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bowtie2 --version 2>&1 | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') END_VERSIONS """ } else { @@ -58,7 +58,7 @@ process BOWTIE2_ALIGN { -x \$INDEX \\ -1 ${reads[0]} \\ -2 ${reads[1]} \\ - --threads ${split_cpus} \\ + --threads $split_cpus \\ $unaligned \\ $options.args \\ 2> ${prefix}.bowtie2.log \\ @@ -72,7 +72,7 @@ process BOWTIE2_ALIGN { fi cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bowtie2 --version 2>&1 | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie2/build/main.nf b/modules/bowtie2/build/main.nf index 04880aeb..f140d7a4 100644 --- 
a/modules/bowtie2/build/main.nf +++ b/modules/bowtie2/build/main.nf @@ -32,7 +32,7 @@ process BOWTIE2_BUILD { bowtie2-build $options.args --threads $task.cpus $fasta bowtie2/${fasta.baseName} cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bowtie2 --version 2>&1 | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index 8728884c..ae4ee147 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -24,7 +24,7 @@ process BWA_ALN { output: tuple val(meta), path("*.sai"), emit: sai - path "versions.yml" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -43,7 +43,7 @@ process BWA_ALN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } else { @@ -66,7 +66,7 @@ process BWA_ALN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/index/main.nf b/modules/bwa/index/main.nf index 9b64bd37..9de3fe0c 100644 --- a/modules/bwa/index/main.nf +++ b/modules/bwa/index/main.nf @@ -22,17 +22,22 @@ process BWA_INDEX { path fasta output: - path "bwa" , emit: index - path "versions.yml" , emit: version + path "bwa" , emit: index + path "versions.yml", emit: version script: def software = getSoftwareName(task.process) """ mkdir bwa - bwa index $options.args $fasta -p bwa/${fasta.baseName} + bwa \\ + index \\ + $options.args \\ + $fasta \\ + -p bwa/${fasta.baseName} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index b9096cb8..05e5260c 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -37,14 +37,14 @@ process BWA_MEM { bwa mem \\ $options.args \\ $read_group \\ - -t ${split_cpus} \\ + -t $split_cpus \\ \$INDEX \\ $reads \\ | samtools view $options.args2 -@ ${split_cpus} -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index cb3493c8..ae2998d8 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -43,7 +43,7 @@ process BWA_SAMPE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/samse/main.nf 
b/modules/bwa/samse/main.nf index 82d23854..89310153 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -43,7 +43,7 @@ process BWA_SAMSE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwa 2>&1 | sed 's/^.*Version: //; s/Contact:.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwamem2/index/main.nf b/modules/bwamem2/index/main.nf index f052d172..9274ebe8 100644 --- a/modules/bwamem2/index/main.nf +++ b/modules/bwamem2/index/main.nf @@ -29,10 +29,14 @@ process BWAMEM2_INDEX { def software = getSoftwareName(task.process) """ mkdir bwamem2 - bwa-mem2 index $options.args $fasta -p bwamem2/${fasta} + bwa-mem2 \\ + index \\ + $options.args \\ + $fasta -p bwamem2/${fasta} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwa-mem2 version 2>&1) + ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') END_VERSIONS """ } diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 2838cdda..f47bfea3 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -34,17 +34,18 @@ process BWAMEM2_MEM { """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` - bwa-mem2 mem \\ + bwa-mem2 \\ + mem \\ $options.args \\ $read_group \\ - -t ${split_cpus} \\ + -t $split_cpus \\ \$INDEX \\ $reads \\ | samtools view $options.args2 -@ ${split_cpus} -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwa-mem2 version 2>&1) + ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') END_VERSIONS """ } diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index 0f605bd1..814faa2b 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -44,7 +44,7 @@ process BWAMETH_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwameth.py --version 2>&1 | cut -f2 -d" ") + ${getSoftwareName(task.process)}: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") END_VERSIONS """ } diff --git a/modules/bwameth/index/main.nf b/modules/bwameth/index/main.nf index 7b75d328..a7a0b783 100644 --- a/modules/bwameth/index/main.nf +++ b/modules/bwameth/index/main.nf @@ -32,7 +32,7 @@ process BWAMETH_INDEX { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bwameth.py --version 2>&1 | cut -f2 -d" ") + ${getSoftwareName(task.process)}: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") END_VERSIONS """ } diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index cbee7fc0..193dfd5e 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -75,11 +75,13 @@ process CHROMAP_CHROMAP { -1 ${reads.join(',')} \\ -o ${prefix}.${file_extension} + $compression_cmds + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo "$VERSION") END_VERSIONS - """ + compression_cmds + """ } else { """ chromap ${args.join(' ')} \\ @@ -90,10 +92,12 @@ process CHROMAP_CHROMAP { -2 ${reads[1]} \\ -o ${prefix}.${file_extension} + $compression_cmds + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo "$VERSION") END_VERSIONS - """ + 
compression_cmds + """ } } diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index 764eefe1..e52ffe4b 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -35,6 +35,7 @@ process CHROMAP_INDEX { -t $task.cpus \\ -r $fasta \\ -o ${prefix}.index + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo "$VERSION") diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index 9c51ce60..91c23b29 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -44,7 +44,7 @@ process GATK4_APPLYBQSR { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 9abca6e9..2f368014 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -47,7 +47,7 @@ process GATK4_BASERECALIBRATOR { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index fc484f84..28b88f5b 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -38,7 +38,7 @@ process GATK4_BEDTOINTERVALLIST { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 0c0446c6..b384d405 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -42,7 +42,7 @@ process GATK4_CREATESEQUENCEDICTIONARY { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index e7b38f35..cb8ec0ea 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -38,7 +38,7 @@ process GATK4_FASTQTOSAM { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index da03555c..782b7653 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -45,7 +45,7 @@ process GATK4_GETPILEUPSUMMARIES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk 
--version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 02fd1ee3..63771393 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -49,7 +49,7 @@ process GATK4_HAPLOTYPECALLER { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 90a77c5a..99257354 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -50,7 +50,7 @@ process GATK4_INTERVALLISTTOOLS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index 68b17366..3a3c8e70 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -41,7 +41,7 @@ process GATK4_MARKDUPLICATES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 269836a7..978b7cff 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -41,7 +41,7 @@ process GATK4_MERGEBAMALIGNMENT { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index d47aa68f..c62a6289 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -46,7 +46,7 @@ process GATK4_MERGEVCFS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 03bcc2d1..c4efc724 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -74,7 +74,7 @@ process GATK4_MUTECT2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index e691d3f9..0a95b604 100644 --- a/modules/gatk4/revertsam/main.nf +++ 
b/modules/gatk4/revertsam/main.nf @@ -36,7 +36,7 @@ process GATK4_REVERTSAM { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index edf895bb..eed7a83f 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -37,7 +37,7 @@ process GATK4_SAMTOFASTQ { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 11d6c9a5..0c4ba163 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -38,7 +38,7 @@ process GATK4_SPLITNCIGARREADS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index 90b6ef25..a79bce8f 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -41,7 +41,7 @@ process GATK4_VARIANTFILTRATION { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gatk --version 2>&1 | sed 's/^.*(GATK) v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/salmon/index/main.nf b/modules/salmon/index/main.nf index df4e2ed8..9e62eb8a 100644 --- a/modules/salmon/index/main.nf +++ b/modules/salmon/index/main.nf @@ -48,7 +48,7 @@ process SALMON_INDEX { -i salmon cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(salmon --version | sed -e "s/salmon //g") + ${getSoftwareName(task.process)}: \$(echo \$(salmon --version) | sed -e "s/salmon //g") END_VERSIONS """ } diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 92d85f58..397bdd31 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -74,7 +74,7 @@ process SALMON_QUANT { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(salmon --version | sed -e "s/salmon //g") + ${getSoftwareName(task.process)}: \$(echo \$(salmon --version) | sed -e "s/salmon //g") END_VERSIONS """ } diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 4a08026e..cccf2f7c 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -48,7 +48,7 @@ process SAMTOOLS_AMPLICONCLIP { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git 
a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index fad14602..f7d6cbef 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -31,7 +31,7 @@ process SAMTOOLS_FAIDX { samtools faidx $fasta cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 73d32db8..02110870 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -38,7 +38,7 @@ process SAMTOOLS_FASTQ { $bam cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index 70c04b23..d0cf86aa 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -31,7 +31,7 @@ process SAMTOOLS_FLAGSTAT { samtools flagstat $bam > ${bam}.flagstat cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index 33605f30..06a07964 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -31,7 +31,7 @@ process SAMTOOLS_IDXSTATS { samtools idxstats $bam > ${bam}.idxstats cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index 83802d95..c2ba4de7 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -32,7 +32,7 @@ process SAMTOOLS_INDEX { samtools index $options.args $bam cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 85a41926..ec574105 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -32,7 +32,7 @@ process SAMTOOLS_MERGE { samtools merge ${prefix}.bam $bams cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 28185934..903bfd33 100644 --- a/modules/samtools/mpileup/main.nf +++ 
b/modules/samtools/mpileup/main.nf @@ -37,7 +37,7 @@ process SAMTOOLS_MPILEUP { $bam cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index 4c3c4c1f..edd558bf 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -32,7 +32,7 @@ process SAMTOOLS_SORT { samtools sort $options.args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index b1fd325f..823b5f31 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -31,7 +31,7 @@ process SAMTOOLS_STATS { samtools stats $bam > ${bam}.stats cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index 824b9bab..110d5abf 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -32,7 +32,7 @@ process SAMTOOLS_VIEW { samtools view $options.args $bam > ${prefix}.bam cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(samtools --version 2>&1 | sed 's/^.*samtools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index c002062a..6f5a65c2 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -62,7 +62,8 @@ process TRIMGALORE { ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(trim_galore --version 2>&1 | sed 's/^.*version //; s/Last.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') + cutadapt: \$(cutadapt --version) END_VERSIONS """ } else { @@ -82,7 +83,8 @@ process TRIMGALORE { ${prefix}_2.fastq.gz cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(trim_galore --version 2>&1 | sed 's/^.*version //; s/Last.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') + cutadapt: \$(cutadapt --version) END_VERSIONS """ } diff --git a/tests/modules/bandage/image/main.nf b/tests/modules/bandage/image/main.nf index becfb450..524066b0 100644 --- a/tests/modules/bandage/image/main.nf +++ b/tests/modules/bandage/image/main.nf @@ -5,10 +5,10 @@ nextflow.enable.dsl = 2 include { BANDAGE_IMAGE } from '../../../../modules/bandage/image/main.nf' addParams( options: [:] ) workflow test_bandage_image { - input = [ [ id:'B-3106' ], // meta map - [ 
file("${launchDir}/tests/data/generic/gfa/B-3106.gfa", checkIfExists: true) ] - //[ file("${launchDir}/tests/data/genomics/sarscov2/genome/gfa/test.gfa", checkIfExists: true) ] - ] + input = [ + [ id:'B-3106' ], // meta map + file( params.test_data['sarscov2']['illumina']['assembly_gfa'], checkIfExists: true) + ] BANDAGE_IMAGE ( input ) } diff --git a/tests/modules/bbmap/align/main.nf b/tests/modules/bbmap/align/main.nf index 248e3975..c3bf43ba 100644 --- a/tests/modules/bbmap/align/main.nf +++ b/tests/modules/bbmap/align/main.nf @@ -7,11 +7,11 @@ include { BBMAP_ALIGN } from '../../../../modules/bbmap/align/main.nf' addParams include { BBMAP_ALIGN as BBMAP_ALIGN_PIGZ } from '../../../../modules/bbmap/align/main.nf' addParams( options: [args: "unpigz=t" ] ) workflow test_bbmap_align_paired_end_fasta_ref { - + input = [ [ id:'test', single_end:false ], // meta map [ file( params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) @@ -20,11 +20,11 @@ workflow test_bbmap_align_paired_end_fasta_ref { } workflow test_bbmap_align_paired_end_index_ref { - + input = [ [ id:'test', single_end:false ], // meta map [ file( params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) @@ -34,7 +34,7 @@ workflow test_bbmap_align_paired_end_index_ref { } workflow test_bbmap_align_single_end_index_ref { - + input = [ [ id:'test', single_end:true ], // meta map file( params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] @@ -45,11 +45,11 @@ workflow test_bbmap_align_single_end_index_ref { } workflow test_bbmap_align_paired_end_index_ref_pigz { - + input = [ [ id:'test', single_end:false ], // meta map [ file( params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + file( params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bismark/align/test.yml b/tests/modules/bismark/align/test.yml index 662a4aef..42dc44b3 100644 --- a/tests/modules/bismark/align/test.yml +++ b/tests/modules/bismark/align/test.yml @@ -1,4 +1,4 @@ -- name: Run bismark align single-end test workflow +- name: bismark align single-end test workflow command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_single_end -c tests/config/nextflow.config tags: - bismark @@ -8,7 +8,7 @@ md5sum: dca4ba9ff705b70446f812e59bdb1a32 - path: output/test_single_end/test.methylated_1_bismark_bt2_SE_report.txt -- name: Run bismark align paired-end test workflow +- name: bismark align paired-end test workflow command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_paired_end -c tests/config/nextflow.config tags: - bismark diff --git a/tests/modules/bismark/deduplicate/test.yml b/tests/modules/bismark/deduplicate/test.yml index b0fb858c..604c1023 
100644 --- a/tests/modules/bismark/deduplicate/test.yml +++ b/tests/modules/bismark/deduplicate/test.yml @@ -1,4 +1,4 @@ -- name: Run bismark deduplicate test workflow +- name: bismark deduplicate test workflow command: nextflow run ./tests/modules/bismark/deduplicate -entry test_bismark_deduplicate -c tests/config/nextflow.config tags: - bismark diff --git a/tests/modules/bismark/genomepreparation/test.yml b/tests/modules/bismark/genomepreparation/test.yml index 5ce272c6..15a7e7d6 100644 --- a/tests/modules/bismark/genomepreparation/test.yml +++ b/tests/modules/bismark/genomepreparation/test.yml @@ -1,4 +1,4 @@ -- name: Run bismark_genomepreparation test workflow +- name: bismark genomepreparation test workflow command: nextflow run ./tests/modules/bismark/genomepreparation -entry test_bismark_genomepreparation -c tests/config/nextflow.config tags: - bismark diff --git a/tests/modules/bismark/methylationextractor/test.yml b/tests/modules/bismark/methylationextractor/test.yml index a64c1edf..4505c428 100644 --- a/tests/modules/bismark/methylationextractor/test.yml +++ b/tests/modules/bismark/methylationextractor/test.yml @@ -1,4 +1,4 @@ -- name: Run bismark methylation extractor test workflow +- name: bismark methylation extractor test workflow command: nextflow run ./tests/modules/bismark/methylationextractor -entry test_bismark_methylationextractor -c tests/config/nextflow.config tags: - bismark diff --git a/tests/modules/bismark/report/test.yml b/tests/modules/bismark/report/test.yml index 7025d38c..7e85e4dd 100644 --- a/tests/modules/bismark/report/test.yml +++ b/tests/modules/bismark/report/test.yml @@ -1,4 +1,4 @@ -- name: Run bismark report test workflow +- name: bismark report test workflow command: nextflow run ./tests/modules/bismark/report -entry test_bismark_report -c tests/config/nextflow.config tags: - bismark diff --git a/tests/modules/bismark/summary/test.yml b/tests/modules/bismark/summary/test.yml index ee438991..06478873 100644 --- a/tests/modules/bismark/summary/test.yml +++ b/tests/modules/bismark/summary/test.yml @@ -1,4 +1,4 @@ -- name: Run bismark summary test workflow +- name: bismark summary test workflow command: nextflow run ./tests/modules/bismark/summary -entry test_bismark_summary -c tests/config/nextflow.config tags: - bismark diff --git a/tests/modules/bowtie/build_test/main.nf b/tests/modules/bowtie/build_test/main.nf index 6796aad2..a89091a8 100644 --- a/tests/modules/bowtie/build_test/main.nf +++ b/tests/modules/bowtie/build_test/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' addParams( options: [:] ) +include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' addParams( options: [publish_dir:'bowtie'] ) workflow test_bowtie_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bowtie/build_test/test.yml b/tests/modules/bowtie/build_test/test.yml index 0ba58194..c6b765c9 100644 --- a/tests/modules/bowtie/build_test/test.yml +++ b/tests/modules/bowtie/build_test/test.yml @@ -4,15 +4,15 @@ - bowtie - bowtie/build files: - - path: ./output/index/bowtie/genome.3.ebwt + - path: ./output/bowtie/bowtie/genome.3.ebwt md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie/genome.2.ebwt + - path: ./output/bowtie/bowtie/genome.2.ebwt md5sum: 02b44af9f94c62ecd3c583048e25d4cf - - path: ./output/index/bowtie/genome.rev.2.ebwt + - path: ./output/bowtie/bowtie/genome.rev.2.ebwt md5sum: 
9e6b0c4c1ddb99ae71ff8a4fe5ec6459 - - path: ./output/index/bowtie/genome.4.ebwt + - path: ./output/bowtie/bowtie/genome.4.ebwt md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie/genome.rev.1.ebwt + - path: ./output/bowtie/bowtie/genome.rev.1.ebwt md5sum: b37aaf11853e65a3b13561f27a912b06 - - path: ./output/index/bowtie/genome.1.ebwt + - path: ./output/bowtie/bowtie/genome.1.ebwt md5sum: d9b76ecf9fd0413240173273b38d8199 diff --git a/tests/modules/bowtie2/build_test/main.nf b/tests/modules/bowtie2/build_test/main.nf index f1a95156..2b41fab2 100644 --- a/tests/modules/bowtie2/build_test/main.nf +++ b/tests/modules/bowtie2/build_test/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [:] ) +include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [publish_dir:'bowtie2'] ) workflow test_bowtie2_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bowtie2/build_test/test.yml b/tests/modules/bowtie2/build_test/test.yml index cb7283e3..3fd049b9 100644 --- a/tests/modules/bowtie2/build_test/test.yml +++ b/tests/modules/bowtie2/build_test/test.yml @@ -4,15 +4,15 @@ - bowtie2 - bowtie2/build files: - - path: ./output/index/bowtie2/genome.3.bt2 + - path: ./output/bowtie2/bowtie2/genome.3.bt2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie2/genome.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.2.bt2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: ./output/index/bowtie2/genome.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.1.bt2 md5sum: cbe3d0bbea55bc57c99b4bfa25b5fbdf - - path: ./output/index/bowtie2/genome.4.bt2 + - path: ./output/bowtie2/bowtie2/genome.4.bt2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie2/genome.rev.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.1.bt2 md5sum: 52be6950579598a990570fbcf5372184 - - path: ./output/index/bowtie2/genome.rev.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.2.bt2 md5sum: e3b4ef343dea4dd571642010a7d09597 diff --git a/tests/modules/bwa/index/main.nf b/tests/modules/bwa/index/main.nf index fa7fffbc..30d31202 100644 --- a/tests/modules/bwa/index/main.nf +++ b/tests/modules/bwa/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [publish_dir:'bwa'] ) workflow test_bwa_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwa/index/test.yml b/tests/modules/bwa/index/test.yml index 58af27e3..cdcb5e53 100644 --- a/tests/modules/bwa/index/test.yml +++ b/tests/modules/bwa/index/test.yml @@ -4,13 +4,13 @@ - bwa - bwa/index files: - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 diff --git 
a/tests/modules/bwamem2/index/main.nf b/tests/modules/bwamem2/index/main.nf index 897a62fe..bb7d0803 100644 --- a/tests/modules/bwamem2/index/main.nf +++ b/tests/modules/bwamem2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' addParams( options: [:] ) +include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' addParams( options: [publish_dir:'bwamem2'] ) workflow test_bwamem2_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwamem2/index/test.yml b/tests/modules/bwamem2/index/test.yml index b71b6901..d9d15c53 100644 --- a/tests/modules/bwamem2/index/test.yml +++ b/tests/modules/bwamem2/index/test.yml @@ -4,13 +4,13 @@ - bwamem2 - bwamem2/index files: - - path: ./output/index/bwamem2/genome.fasta.amb + - path: ./output/bwamem2/bwamem2/genome.fasta.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwamem2/genome.fasta.pac + - path: ./output/bwamem2/bwamem2/genome.fasta.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwamem2/genome.fasta.0123 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 md5sum: b02870de80106104abcb03cd9463e7d8 - - path: ./output/index/bwamem2/genome.fasta.bwt.2bit.64 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 md5sum: d097a1b82dee375d41a1ea69895a9216 - - path: ./output/index/bwamem2/genome.fasta.ann + - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 diff --git a/tests/modules/bwameth/align/test.yml b/tests/modules/bwameth/align/test.yml index a7d9fdbc..8545972f 100644 --- a/tests/modules/bwameth/align/test.yml +++ b/tests/modules/bwameth/align/test.yml @@ -1,4 +1,4 @@ -- name: Run bwameth single-end test workflow +- name: bwameth align single-end test workflow command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_single_end -c tests/config/nextflow.config tags: - bwameth @@ -6,7 +6,7 @@ files: - path: output/test_single_end/test.bam -- name: Run bwameth paired-end test workflow +- name: bwameth align paired-end test workflow command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_paired_end -c tests/config/nextflow.config tags: - bwameth diff --git a/tests/modules/bwameth/index/main.nf b/tests/modules/bwameth/index/main.nf index 17477ca0..46662201 100644 --- a/tests/modules/bwameth/index/main.nf +++ b/tests/modules/bwameth/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [:] ) +include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [publish_dir:'bwameth'] ) workflow test_bwameth_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwameth/index/test.yml b/tests/modules/bwameth/index/test.yml index 5a2595d8..0cc7922e 100644 --- a/tests/modules/bwameth/index/test.yml +++ b/tests/modules/bwameth/index/test.yml @@ -1,18 +1,18 @@ -- name: Run bwameth index test workflow +- name: bwameth index test workflow command: nextflow run ./tests/modules/bwameth/index -entry test_bwameth_index -c tests/config/nextflow.config tags: - bwameth - bwameth/index files: - - path: ./output/index/bwameth/genome.fasta.bwameth.c2t + - path: ./output/bwameth/bwameth/genome.fasta.bwameth.c2t md5sum: 98039984526a41d04d6bd92fcc040c62 - - path: 
./output/index/bwameth/genome.fasta.bwameth.c2t.pac + - path: ./output/bwameth/bwameth/genome.fasta.bwameth.c2t.pac md5sum: 4d8e51cb0bbdeaf24576bdf0264d8653 - - path: ./output/index/bwameth/genome.fasta.bwameth.c2t.amb + - path: ./output/bwameth/bwameth/genome.fasta.bwameth.c2t.amb md5sum: 249a4195069071ce47cd0bae68abe376 - - path: ./output/index/bwameth/genome.fasta.bwameth.c2t.ann + - path: ./output/bwameth/bwameth/genome.fasta.bwameth.c2t.ann md5sum: 46524d4359dcdfb203a235ab3b930dbb - - path: ./output/index/bwameth/genome.fasta.bwameth.c2t.bwt + - path: ./output/bwameth/bwameth/genome.fasta.bwameth.c2t.bwt md5sum: 84f65df7d42dbe84c9ccfaddfdd5ea6b - - path: ./output/index/bwameth/genome.fasta.bwameth.c2t.sa + - path: ./output/bwameth/bwameth/genome.fasta.bwameth.c2t.sa md5sum: d25f6486f5134f57ed5b258f6fbb8673 diff --git a/tests/modules/methyldackel/extract/test.yml b/tests/modules/methyldackel/extract/test.yml index e2494181..70c371d7 100644 --- a/tests/modules/methyldackel/extract/test.yml +++ b/tests/modules/methyldackel/extract/test.yml @@ -1,4 +1,4 @@ -- name: Run methyldackel extract test workflow +- name: methyldackel extract command: nextflow run ./tests/modules/methyldackel/extract -entry test_methyldackel_extract -c tests/config/nextflow.config tags: - methyldackel diff --git a/tests/modules/methyldackel/mbias/test.yml b/tests/modules/methyldackel/mbias/test.yml index 37102aec..43074291 100644 --- a/tests/modules/methyldackel/mbias/test.yml +++ b/tests/modules/methyldackel/mbias/test.yml @@ -1,4 +1,4 @@ -- name: Run methyldackel mbias test workflow +- name: methyldackel mbias command: nextflow run ./tests/modules/methyldackel/mbias -entry test_methyldackel_mbias -c tests/config/nextflow.config tags: - methyldackel diff --git a/tests/modules/minia/test.yml b/tests/modules/minia/test.yml index d4e84e52..6836f51d 100644 --- a/tests/modules/minia/test.yml +++ b/tests/modules/minia/test.yml @@ -1,4 +1,4 @@ -- name: Run tests for minia - test_minia +- name: minia command: nextflow run tests/modules/minia -entry test_minia -c tests/config/nextflow.config tags: - minia diff --git a/tests/modules/qualimap/bamqc/test.yml b/tests/modules/qualimap/bamqc/test.yml index 704c08b2..71a40c13 100644 --- a/tests/modules/qualimap/bamqc/test.yml +++ b/tests/modules/qualimap/bamqc/test.yml @@ -1,4 +1,4 @@ -- name: Run qualimap bamqc test workflow +- name: qualimap bamqc test workflow command: nextflow run ./tests/modules/qualimap/bamqc -entry test_qualimap_bamqc -c tests/config/nextflow.config tags: - qualimap diff --git a/tests/modules/salmon/index/main.nf b/tests/modules/salmon/index/main.nf index d4c87c45..98804733 100644 --- a/tests/modules/salmon/index/main.nf +++ b/tests/modules/salmon/index/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' addParams( options: [:] ) +include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' addParams( options: [publish_dir:'salmon'] ) workflow test_salmon_index { genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) - + SALMON_INDEX ( genome_fasta, transcript_fasta ) } diff --git a/tests/modules/salmon/index/test.yml b/tests/modules/salmon/index/test.yml index 156bc5ca..acefb044 100644 --- a/tests/modules/salmon/index/test.yml +++ b/tests/modules/salmon/index/test.yml @@ -4,28 +4,28 @@ - salmon - salmon/index files: - - path: 
./output/index/salmon/ref_indexing.log - - path: ./output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: ./output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: ./output/index/salmon/versionInfo.json + - path: ./output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: ./output/index/salmon/complete_ref_lens.bin + - path: ./output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/mphf.bin + - path: ./output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: ./output/index/salmon/pre_indexing.log - - path: ./output/index/salmon/ctable.bin - - path: ./output/index/salmon/duplicate_clusters.tsv + - path: ./output/salmon/salmon/pre_indexing.log + - path: ./output/salmon/salmon/ctable.bin + - path: ./output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: ./output/index/salmon/reflengths.bin + - path: ./output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/info.json + - path: ./output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: ./output/index/salmon/refAccumLengths.bin + - path: ./output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: ./output/index/salmon/ctg_offsets.bin + - path: ./output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: ./output/index/salmon/rank.bin + - path: ./output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin diff --git a/tests/modules/samtools/faidx/test.yml b/tests/modules/samtools/faidx/test.yml index bcadf955..49a92265 100644 --- a/tests/modules/samtools/faidx/test.yml +++ b/tests/modules/samtools/faidx/test.yml @@ -1,4 +1,4 @@ -- name: Run samtools faidx test workflow +- name: samtools faidx test workflow command: nextflow run tests/modules/samtools/faidx -entry test_samtools_faidx -c tests/config/nextflow.config tags: - samtools From 3c5492b4a383ebd822422804d74cc9e15a747b0d Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 28 Sep 2021 06:56:27 +0100 Subject: [PATCH 097/314] Fix more version commands (#750) * Fix outstanding tests * Fix more version commands * Fix remaining modules --- modules/bismark/summary/main.nf | 2 +- modules/chromap/chromap/main.nf | 2 +- modules/graphmap2/align/main.nf | 2 +- modules/graphmap2/index/main.nf | 2 +- modules/gunzip/main.nf | 8 ++- modules/ivar/consensus/main.nf | 2 +- modules/ivar/trim/main.nf | 2 +- modules/ivar/variants/main.nf | 2 +- modules/kraken2/kraken2/main.nf | 2 +- modules/lofreq/call/main.nf | 2 +- modules/lofreq/callparallel/main.nf | 2 +- modules/lofreq/filter/main.nf | 2 +- modules/lofreq/indelqual/main.nf | 2 +- modules/minia/main.nf | 2 +- modules/preseq/lcextrap/main.nf | 2 +- modules/qualimap/bamqc/main.nf | 2 +- modules/tabix/bgzip/main.nf | 2 +- modules/tabix/bgziptabix/main.nf | 2 +- modules/tabix/tabix/main.nf | 2 +- modules/ucsc/wigtobigwig/main.nf | 4 +- modules/untar/main.nf | 12 +++-- modules/unzip/main.nf | 2 +- tests/modules/bwa/sampe/main.nf | 15 ++++-- tests/modules/bwa/samse/main.nf | 13 +++-- tests/modules/bwameth/align/main.nf | 9 ++-- tests/modules/bwameth/align/test.yml | 4 +- tests/modules/genmap/index/main.nf | 2 +- 
tests/modules/genmap/index/test.yml | 42 +++++++-------- tests/modules/ivar/trim/test.yml | 2 +- tests/modules/qualimap/bamqc/test.yml | 2 +- .../modules/rsem/calculateexpression/main.nf | 17 +++--- tests/modules/rsem/preparereference/main.nf | 2 +- tests/modules/rsem/preparereference/test.yml | 16 +++--- tests/modules/star/genomegenerate/main.nf | 2 +- tests/modules/star/genomegenerate/test.yml | 32 +++++------ tests/modules/stringtie/merge/main.nf | 54 +++++++++++-------- tests/modules/stringtie/merge/test.yml | 8 +-- tests/modules/stringtie/stringtie/main.nf | 24 +++++---- tests/modules/stringtie/stringtie/test.yml | 36 ++++++------- 39 files changed, 185 insertions(+), 157 deletions(-) diff --git a/modules/bismark/summary/main.nf b/modules/bismark/summary/main.nf index ae8ac27c..d71772b3 100644 --- a/modules/bismark/summary/main.nf +++ b/modules/bismark/summary/main.nf @@ -35,7 +35,7 @@ process BISMARK_SUMMARY { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bismark -v 2>&1 | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index 193dfd5e..00aae27e 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -4,7 +4,7 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSION = 0.1 // No version information printed +def VERSION = '0.1' // No version information printed process CHROMAP_CHROMAP { tag "$meta.id" diff --git a/modules/graphmap2/align/main.nf b/modules/graphmap2/align/main.nf index cf598b3d..30d6cbfd 100644 --- a/modules/graphmap2/align/main.nf +++ b/modules/graphmap2/align/main.nf @@ -43,7 +43,7 @@ process GRAPHMAP2_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(graphmap2 align 2>&1 | sed 's/^.*Version: v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/graphmap2/index/main.nf b/modules/graphmap2/index/main.nf index 906aa6ec..194c3594 100644 --- a/modules/graphmap2/index/main.nf +++ b/modules/graphmap2/index/main.nf @@ -36,7 +36,7 @@ process GRAPHMAP2_INDEX { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(graphmap2 align 2>&1 | sed 's/^.*Version: v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index a53a9858..6a2287b6 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -29,10 +29,14 @@ process GUNZIP { def software = getSoftwareName(task.process) gunzip = archive.toString() - '.gz' """ - gunzip -f $options.args $archive + gunzip \\ + -f \\ + $options.args \\ + $archive + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gunzip --version 2>&1 | sed 's/^.*(gzip) //; s/ Copyright.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/ivar/consensus/main.nf b/modules/ivar/consensus/main.nf index 7c4a5b57..b29450b7 
100644 --- a/modules/ivar/consensus/main.nf +++ b/modules/ivar/consensus/main.nf @@ -44,7 +44,7 @@ process IVAR_CONSENSUS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(ivar version 2>&1 | sed 's/^.*iVar version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ivar/trim/main.nf b/modules/ivar/trim/main.nf index e9b1e23b..2a698249 100644 --- a/modules/ivar/trim/main.nf +++ b/modules/ivar/trim/main.nf @@ -40,7 +40,7 @@ process IVAR_TRIM { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(ivar version 2>&1 | sed 's/^.*iVar version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ivar/variants/main.nf b/modules/ivar/variants/main.nf index 505d72fb..2bf82a37 100644 --- a/modules/ivar/variants/main.nf +++ b/modules/ivar/variants/main.nf @@ -47,7 +47,7 @@ process IVAR_VARIANTS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(ivar version 2>&1 | sed 's/^.*iVar version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index 9a01389a..4000d12a 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -50,7 +50,7 @@ process KRAKEN2_KRAKEN2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(kraken2 --version 2>&1 | sed 's/^.*Kraken version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/call/main.nf b/modules/lofreq/call/main.nf index b205f041..9fb113ff 100644 --- a/modules/lofreq/call/main.nf +++ b/modules/lofreq/call/main.nf @@ -39,7 +39,7 @@ process LOFREQ_CALL { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lofreq version 2>&1 | sed 's/^version: //; s/ *commit.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index 2bea68f2..42400793 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -41,7 +41,7 @@ process LOFREQ_CALLPARALLEL { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lofreq version 2>&1 | sed 's/^version: //; s/ *commit.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/filter/main.nf b/modules/lofreq/filter/main.nf index 693cef23..09c91c8c 100644 --- a/modules/lofreq/filter/main.nf +++ b/modules/lofreq/filter/main.nf @@ -37,7 +37,7 @@ process LOFREQ_FILTER { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lofreq version 2>&1 | sed 's/^version: //; s/ *commit.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git 
a/modules/lofreq/indelqual/main.nf b/modules/lofreq/indelqual/main.nf index 89c79c39..78466574 100644 --- a/modules/lofreq/indelqual/main.nf +++ b/modules/lofreq/indelqual/main.nf @@ -37,7 +37,7 @@ process LOFREQ_INDELQUAL { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lofreq version 2>&1 | sed 's/^.*lofreq //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/minia/main.nf b/modules/minia/main.nf index b7aa9272..518e8264 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -40,7 +40,7 @@ process MINIA { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(minia --version 2>&1 | sed 's/^.*Minia version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(minia --version 2>&1) | sed 's/^.*Minia version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index 059b81f6..69f682d3 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -42,7 +42,7 @@ process PRESEQ_LCEXTRAP { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(preseq 2>&1 | sed 's/^.*Version: //; s/Usage:.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') END_VERSIONS """ } diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index 17779e27..0cc101ef 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -58,7 +58,7 @@ process QUALIMAP_BAMQC { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(qualimap 2>&1 | sed 's/^.*QualiMap v.//; s/Built.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ } diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index eb95de62..c76588df 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -33,7 +33,7 @@ process TABIX_BGZIP { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(tabix -h 2>&1 | sed 's/^.*Version: //; s/(.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 7179a97e..302c8500 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -34,7 +34,7 @@ process TABIX_BGZIPTABIX { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(tabix -h 2>&1 | sed 's/^.*Version: //; s/(.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index f703a787..1fabeba4 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -32,7 +32,7 @@ process TABIX_TABIX { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(tabix -h 2>&1 | sed 's/^.*Version: //; s/(.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git 
a/modules/ucsc/wigtobigwig/main.nf b/modules/ucsc/wigtobigwig/main.nf index 945f07c9..29e5cd99 100644 --- a/modules/ucsc/wigtobigwig/main.nf +++ b/modules/ucsc/wigtobigwig/main.nf @@ -4,6 +4,8 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) +def VERSION = '377' // No version information printed + process UCSC_WIGTOBIGWIG { tag '$wig' label 'process_medium' @@ -38,7 +40,7 @@ process UCSC_WIGTOBIGWIG { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(wigToBigWig 2>&1 | sed 's/wigToBigWig v //; s/ - Convert.*\$//') + ${getSoftwareName(task.process)}: \$(echo "$VERSION") END_VERSIONS """ } diff --git a/modules/untar/main.nf b/modules/untar/main.nf index 25b39904..0866dd55 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -22,17 +22,21 @@ process UNTAR { path archive output: - path "$untar" , emit: untar - path "versions.yml" , emit: version + path "$untar" , emit: untar + path "versions.yml", emit: version script: def software = getSoftwareName(task.process) untar = archive.toString() - '.tar.gz' """ - tar -xzvf $options.args $archive + tar \\ + -xzvf \\ + $options.args \\ + $archive + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(tar --version 2>&1 | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf index a2fe2594..9e64bb1b 100644 --- a/modules/unzip/main.nf +++ b/modules/unzip/main.nf @@ -40,7 +40,7 @@ process UNZIP { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - 7za: \$( 7za --help) grep Version | sed 's/.*p7zip Version//; s/(.*//' ) + 7za: \$(echo \$(7za --help) | sed 's/.*p7zip Version //; s/(.*//') END_VERSIONS """ } diff --git a/tests/modules/bwa/sampe/main.nf b/tests/modules/bwa/sampe/main.nf index 86b019b5..017f27e5 100644 --- a/tests/modules/bwa/sampe/main.nf +++ b/tests/modules/bwa/sampe/main.nf @@ -3,18 +3,23 @@ nextflow.enable.dsl = 2 include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) include { BWA_SAMPE } from '../../../../modules/bwa/sampe/main.nf' addParams( options: [:] ) workflow test_bwa_sampe { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + Channel + .fromList( + [ + [ id:'test', single_end:false ], + [ [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] ] + ).collect() + .set { input } fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) BWA_ALN ( input, BWA_INDEX.out.index ) - BWA_SAMPE ( BWA_ALN.out.sai, BWA_INDEX.out.index ) + BWA_SAMPE ( input.join(BWA_ALN.out.sai), BWA_INDEX.out.index ) } diff --git a/tests/modules/bwa/samse/main.nf b/tests/modules/bwa/samse/main.nf index 5a5d8d2b..87a7c7b1 100644 --- a/tests/modules/bwa/samse/main.nf +++ 
b/tests/modules/bwa/samse/main.nf @@ -3,17 +3,20 @@ nextflow.enable.dsl = 2 include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) include { BWA_SAMSE } from '../../../../modules/bwa/samse/main.nf' addParams( options: [:] ) workflow test_bwa_samse { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + Channel + .fromList( + [ [ id:'test', single_end:true ], + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ).collect() + .set { input } fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) BWA_ALN ( input, BWA_INDEX.out.index ) - BWA_SAMSE ( BWA_ALN.out.sai, BWA_INDEX.out.index ) + BWA_SAMSE ( input.join(BWA_ALN.out.sai, by:[0]), BWA_INDEX.out.index ) } diff --git a/tests/modules/bwameth/align/main.nf b/tests/modules/bwameth/align/main.nf index 7a7aa99c..fb8cad6a 100644 --- a/tests/modules/bwameth/align/main.nf +++ b/tests/modules/bwameth/align/main.nf @@ -2,9 +2,8 @@ nextflow.enable.dsl = 2 -include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [:] ) -include { BWAMETH_ALIGN as BWAMETH_ALIGN_SE } from '../../../../modules/bwameth/align/main.nf' addParams( options: [ publish_dir:'test_single_end' ] ) -include { BWAMETH_ALIGN as BWAMETH_ALIGN_PE } from '../../../../modules/bwameth/align/main.nf' addParams( options: [ publish_dir:'test_paired_end' ] ) +include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [:] ) +include { BWAMETH_ALIGN } from '../../../../modules/bwameth/align/main.nf' addParams( options: [:] ) // // Test with single-end data @@ -16,7 +15,7 @@ workflow test_bwameth_align_single_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMETH_INDEX ( fasta ) - BWAMETH_ALIGN_SE ( input, BWAMETH_INDEX.out.index ) + BWAMETH_ALIGN ( input, BWAMETH_INDEX.out.index ) } // @@ -30,5 +29,5 @@ workflow test_bwameth_align_paired_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMETH_INDEX ( fasta ) - BWAMETH_ALIGN_PE ( input, BWAMETH_INDEX.out.index ) + BWAMETH_ALIGN ( input, BWAMETH_INDEX.out.index ) } diff --git a/tests/modules/bwameth/align/test.yml b/tests/modules/bwameth/align/test.yml index 8545972f..5cf4b84d 100644 --- a/tests/modules/bwameth/align/test.yml +++ b/tests/modules/bwameth/align/test.yml @@ -4,7 +4,7 @@ - bwameth - bwameth/align files: - - path: output/test_single_end/test.bam + - path: output/bwameth/test.bam - name: bwameth align paired-end test workflow command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_paired_end -c tests/config/nextflow.config @@ -12,5 +12,5 @@ - bwameth - bwameth/align files: - - path: output/test_paired_end/test.bam + - path: output/bwameth/test.bam diff --git a/tests/modules/genmap/index/main.nf b/tests/modules/genmap/index/main.nf index dfbdbbed..358ebb35 100644 --- a/tests/modules/genmap/index/main.nf +++ b/tests/modules/genmap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' addParams( options: [:] ) +include { GENMAP_INDEX } from 
'../../../../modules/genmap/index/main.nf' addParams( options: [publish_dir:'genmap'] ) workflow test_genmap_index { diff --git a/tests/modules/genmap/index/test.yml b/tests/modules/genmap/index/test.yml index 8b06e75e..c5078014 100644 --- a/tests/modules/genmap/index/test.yml +++ b/tests/modules/genmap/index/test.yml @@ -4,45 +4,45 @@ - genmap - genmap/index files: - - path: output/index/genmap/index.ids.concat + - path: output/genmap/genmap/index.ids.concat md5sum: da6caa25f62c5407ccdfbcce1fa92408 - - path: output/index/genmap/index.ids.limits + - path: output/genmap/genmap/index.ids.limits md5sum: f82636c5da188aec131d3a809473eff1 - - path: output/index/genmap/index.info.concat + - path: output/genmap/genmap/index.info.concat md5sum: 8ba5273aa9e58722bf45b9cc39fc6bfe - - path: output/index/genmap/index.info.limits + - path: output/genmap/genmap/index.info.limits md5sum: 3522f2811f4ddf04598809fc84a1459e - - path: output/index/genmap/index.lf.drp + - path: output/genmap/genmap/index.lf.drp md5sum: dd85d6a23af2c7adf2695658e3056c08 - - path: output/index/genmap/index.lf.drp.sbl + - path: output/genmap/genmap/index.lf.drp.sbl md5sum: f1d3ff8443297732862df21dc4e57262 - - path: output/index/genmap/index.lf.drs + - path: output/genmap/genmap/index.lf.drs md5sum: 93b885adfe0da089cdf634904fd59f71 - - path: output/index/genmap/index.lf.drv + - path: output/genmap/genmap/index.lf.drv md5sum: e06b605496bd91b32afa3c4f56d934ac - - path: output/index/genmap/index.lf.drv.sbl + - path: output/genmap/genmap/index.lf.drv.sbl md5sum: 8dd6bb7329a71449b0a1b292b5999164 - - path: output/index/genmap/index.lf.pst + - path: output/genmap/genmap/index.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/genmap/index.rev.lf.drp + - path: output/genmap/genmap/index.rev.lf.drp md5sum: 5d9107e3aeec0721553dd661d4365fef - - path: output/index/genmap/index.rev.lf.drp.sbl + - path: output/genmap/genmap/index.rev.lf.drp.sbl md5sum: f1d3ff8443297732862df21dc4e57262 - - path: output/index/genmap/index.rev.lf.drs + - path: output/genmap/genmap/index.rev.lf.drs md5sum: 93b885adfe0da089cdf634904fd59f71 - - path: output/index/genmap/index.rev.lf.drv + - path: output/genmap/genmap/index.rev.lf.drv md5sum: df7e795edc0a034577a9d2599fe8cfeb - - path: output/index/genmap/index.rev.lf.drv.sbl + - path: output/genmap/genmap/index.rev.lf.drv.sbl md5sum: 8dd6bb7329a71449b0a1b292b5999164 - - path: output/index/genmap/index.rev.lf.pst + - path: output/genmap/genmap/index.rev.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/genmap/index.sa.ind + - path: output/genmap/genmap/index.sa.ind md5sum: e21e5c7ce887cc8e3d0fa44ab1019cab - - path: output/index/genmap/index.sa.len + - path: output/genmap/genmap/index.sa.len md5sum: 5dfc20cfe8ed9892451461a8d402f51c - - path: output/index/genmap/index.sa.val + - path: output/genmap/genmap/index.sa.val md5sum: 400ee7f2fe93b2000ae3a5da5e509730 - - path: output/index/genmap/index.txt.concat + - path: output/genmap/genmap/index.txt.concat md5sum: b4303962e0c176107945f3405370e6ae - - path: output/index/genmap/index.txt.limits + - path: output/genmap/genmap/index.txt.limits md5sum: 4480a068db603e4c9a27bc4fa9ceaf14 diff --git a/tests/modules/ivar/trim/test.yml b/tests/modules/ivar/trim/test.yml index 013c6365..f2f46676 100644 --- a/tests/modules/ivar/trim/test.yml +++ b/tests/modules/ivar/trim/test.yml @@ -5,4 +5,4 @@ - ivar/trim files: - path: output/ivar/test.bam - md5sum: 8705d032b28a1c3dbfe78fa762a2132f + md5sum: 12cff17d43b1efdba8120a6bff5311e3 diff --git 
a/tests/modules/qualimap/bamqc/test.yml b/tests/modules/qualimap/bamqc/test.yml index 71a40c13..7d746a51 100644 --- a/tests/modules/qualimap/bamqc/test.yml +++ b/tests/modules/qualimap/bamqc/test.yml @@ -6,7 +6,7 @@ files: - path: ./output/qualimap/test/qualimapReport.html - path: ./output/qualimap/test/genome_results.txt - md5sum: 00ad697dbec5141428ac8d850c13e1c5 + md5sum: 5ec87ea86ad734d512c8c76fe8eb37b1 - path: ./output/qualimap/test/css/plus.png md5sum: 0125e6faa04e2cf0141a2d599d3bb220 - path: ./output/qualimap/test/css/down-pressed.png diff --git a/tests/modules/rsem/calculateexpression/main.nf b/tests/modules/rsem/calculateexpression/main.nf index ee01687e..e7de83a4 100644 --- a/tests/modules/rsem/calculateexpression/main.nf +++ b/tests/modules/rsem/calculateexpression/main.nf @@ -2,20 +2,21 @@ nextflow.enable.dsl = 2 -include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: [args: "--star"]) -include { RSEM_CALCULATEEXPRESSION } from '../../../../modules/rsem/calculateexpression/main.nf' addParams(options: [args: "--star --star-gzipped-read-file"]) +include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: [args: "--star"]) +include { RSEM_CALCULATEEXPRESSION } from '../../../../modules/rsem/calculateexpression/main.nf' addParams(options: [args: "--star --star-gzipped-read-file"]) workflow test_rsem_calculateexpression { + input = [ + [ id:'test', single_end:false, strandedness: 'forward' ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - input = [ [ id:'test', single_end:false, strandedness: 'forward' ], // meta map - [file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true)] - ] - RSEM_PREPAREREFERENCE ( fasta, gtf ) - RSEM_CALCULATEEXPRESSION( input, RSEM_PREPAREREFERENCE.out.index ) } diff --git a/tests/modules/rsem/preparereference/main.nf b/tests/modules/rsem/preparereference/main.nf index a579960b..2d4a9053 100644 --- a/tests/modules/rsem/preparereference/main.nf +++ b/tests/modules/rsem/preparereference/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: [:]) +include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: [publish_dir:'rsem']) workflow test_rsem_preparereference { diff --git a/tests/modules/rsem/preparereference/test.yml b/tests/modules/rsem/preparereference/test.yml index 1526120e..734a92b2 100644 --- a/tests/modules/rsem/preparereference/test.yml +++ b/tests/modules/rsem/preparereference/test.yml @@ -4,19 +4,19 @@ - rsem - rsem/preparereference files: - - path: output/index/rsem/genome.chrlist + - path: output/rsem/rsem/genome.chrlist md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/rsem/genome.fasta + - path: output/rsem/rsem/genome.fasta md5sum: f315020d899597c1b57e5fe9f60f4c3e - - path: output/index/rsem/genome.grp + - path: output/rsem/rsem/genome.grp md5sum: 
c2848a8b6d495956c11ec53efc1de67e - - path: output/index/rsem/genome.idx.fa + - path: output/rsem/rsem/genome.idx.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genome.n2g.idx.fa + - path: output/rsem/rsem/genome.n2g.idx.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genome.seq + - path: output/rsem/rsem/genome.seq md5sum: 94da0c6b88c33e63c9a052a11f4f57c1 - - path: output/index/rsem/genome.ti + - path: output/rsem/rsem/genome.ti md5sum: c9e4ae8d4d13a504eec2acf1b8589a66 - - path: output/index/rsem/genome.transcripts.fa + - path: output/rsem/rsem/genome.transcripts.fa md5sum: 050c521a2719c2ae48267c1e65218f29 diff --git a/tests/modules/star/genomegenerate/main.nf b/tests/modules/star/genomegenerate/main.nf index 4753de9e..7f9e3072 100644 --- a/tests/modules/star/genomegenerate/main.nf +++ b/tests/modules/star/genomegenerate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [:] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [publish_dir:'star'] ) workflow test_star_genomegenerate { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/star/genomegenerate/test.yml b/tests/modules/star/genomegenerate/test.yml index 0a4bff80..1df59378 100644 --- a/tests/modules/star/genomegenerate/test.yml +++ b/tests/modules/star/genomegenerate/test.yml @@ -4,34 +4,34 @@ - star/genomegenerate - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: d0fbf2789ee1e9f60c352ba3655d9de4 - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 5a1ec027e575c3d7c1851e6b80fb8c5d - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 diff --git 
a/tests/modules/stringtie/merge/main.nf b/tests/modules/stringtie/merge/main.nf index f0202c33..49ff5a41 100644 --- a/tests/modules/stringtie/merge/main.nf +++ b/tests/modules/stringtie/merge/main.nf @@ -2,37 +2,45 @@ nextflow.enable.dsl = 2 -include { STRINGTIE as STRINGTIE_FORWARD } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [ publish_dir:'test_stringtie_forward' ] ) -include { STRINGTIE as STRINGTIE_REVERSE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [ publish_dir:'test_stringtie_reverse' ] ) -include { STRINGTIE_MERGE as STRINGTIE_FORWARD_MERGE} from '../../../../modules/stringtie/merge/main.nf' addParams( options: [ publish_dir:'test_stringtie_forward_merge'] ) -include { STRINGTIE_MERGE as STRINGTIE_REVERSE_MERGE} from '../../../../modules/stringtie/merge/main.nf' addParams( options: [ publish_dir:'test_stringtie_reverse_merge'] ) +include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [:] ) +include { STRINGTIE_MERGE } from '../../../../modules/stringtie/merge/main.nf' addParams( options: [:] ) + /* * Test with forward strandedness */ workflow test_stringtie_forward_merge { - input = [ [ id:'test', strandedness:'forward' ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] ] - annotation_gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - - STRINGTIE_FORWARD ( input, annotation_gtf ) - STRINGTIE_FORWARD.out.transcript_gtf - .map { it -> it[1] } - .set { stringtie_gtf } - STRINGTIE_FORWARD_MERGE ( stringtie_gtf, annotation_gtf ) + input = [ + [ id:'test', strandedness:'forward' ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + ] + annotation_gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + STRINGTIE ( input, annotation_gtf ) + STRINGTIE + .out + .transcript_gtf + .map { it -> it[1] } + .set { stringtie_gtf } + + STRINGTIE_MERGE ( stringtie_gtf, annotation_gtf ) } /* * Test with reverse strandedness */ workflow test_stringtie_reverse_merge { - input = [ [ id:'test', strandedness:'reverse' ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] - ] - annotation_gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - - STRINGTIE_REVERSE ( input, annotation_gtf ) - STRINGTIE_REVERSE.out.transcript_gtf - .map { it -> it[1] } - .set { stringtie_gtf } - STRINGTIE_REVERSE_MERGE ( stringtie_gtf, annotation_gtf ) + input = [ + [ id:'test', strandedness:'reverse' ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + ] + annotation_gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + STRINGTIE ( input, annotation_gtf ) + STRINGTIE + .out + .transcript_gtf + .map { it -> it[1] } + .set { stringtie_gtf } + + STRINGTIE_MERGE ( stringtie_gtf, annotation_gtf ) } diff --git a/tests/modules/stringtie/merge/test.yml b/tests/modules/stringtie/merge/test.yml index e49122be..ea47ad48 100644 --- a/tests/modules/stringtie/merge/test.yml +++ b/tests/modules/stringtie/merge/test.yml @@ -1,22 +1,22 @@ -- name: stringtie forward-strand merge +- name: stringtie merge forward-strand command: nextflow run ./tests/modules/stringtie/merge/ -entry test_stringtie_forward_merge 
-c tests/config/nextflow.config tags: - stringtie - stringtie/merge files: - - path: ./output/test_stringtie_forward_merge/stringtie.merged.gtf + - path: ./output/stringtie/stringtie.merged.gtf contains: - 'stringtie' - 'merge' - 'chr22' -- name: stringtie reverse-strand merge +- name: stringtie merge reverse-strand command: nextflow run ./tests/modules/stringtie/merge/ -entry test_stringtie_reverse_merge -c tests/config/nextflow.config tags: - stringtie - stringtie/merge files: - - path: ./output/test_stringtie_reverse_merge/stringtie.merged.gtf + - path: ./output/stringtie/stringtie.merged.gtf contains: - 'stringtie' - 'merge' diff --git a/tests/modules/stringtie/stringtie/main.nf b/tests/modules/stringtie/stringtie/main.nf index fc321f8f..b902cc41 100644 --- a/tests/modules/stringtie/stringtie/main.nf +++ b/tests/modules/stringtie/stringtie/main.nf @@ -2,27 +2,29 @@ nextflow.enable.dsl = 2 -include { STRINGTIE as STRINGTIE_FORWARD } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [ publish_dir:'test_stringtie_forward' ] ) -include { STRINGTIE as STRINGTIE_REVERSE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [ publish_dir:'test_stringtie_reverse' ] ) - +include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [:] ) // // Test with forward strandedness // workflow test_stringtie_forward { - input = [ [ id:'test', strandedness:'forward' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] ] - annotation_gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) + input = [ + [ id:'test', strandedness:'forward' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + ] + annotation_gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) - STRINGTIE_FORWARD ( input, annotation_gtf ) + STRINGTIE ( input, annotation_gtf ) } // // Test with reverse strandedness // workflow test_stringtie_reverse { - input = [ [ id:'test', strandedness:'reverse' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] ] - annotation_gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) + input = [ + [ id:'test', strandedness:'reverse' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + ] + annotation_gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) - STRINGTIE_REVERSE ( input, annotation_gtf ) + STRINGTIE ( input, annotation_gtf ) } diff --git a/tests/modules/stringtie/stringtie/test.yml b/tests/modules/stringtie/stringtie/test.yml index 14eff6eb..28c1b3c2 100644 --- a/tests/modules/stringtie/stringtie/test.yml +++ b/tests/modules/stringtie/stringtie/test.yml @@ -1,43 +1,43 @@ -- name: test_stringtie_forward +- name: stringtie stringtie forward command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_forward -c tests/config/nextflow.config tags: - stringtie - stringtie/stringtie files: - - path: ./output/test_stringtie_forward/test.transcripts.gtf - - path: ./output/test_stringtie_forward/test.gene.abundance.txt + - path: ./output/stringtie/test.transcripts.gtf + - path: ./output/stringtie/test.gene.abundance.txt md5sum: 7d8bce7f2a922e367cedccae7267c22e - - path: ./output/test_stringtie_forward/test.coverage.gtf 
+ - path: ./output/stringtie/test.coverage.gtf md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: ./output/test_stringtie_forward/test.ballgown/e_data.ctab + - path: ./output/stringtie/test.ballgown/e_data.ctab md5sum: 6b4cf69bc03f3f69890f972a0e8b7471 - - path: ./output/test_stringtie_forward/test.ballgown/i_data.ctab + - path: ./output/stringtie/test.ballgown/i_data.ctab md5sum: be3abe09740603213f83d50dcf81427f - - path: ./output/test_stringtie_forward/test.ballgown/t_data.ctab + - path: ./output/stringtie/test.ballgown/t_data.ctab md5sum: 3b66c065da73ae0dd41cc332eff6a818 - - path: ./output/test_stringtie_forward/test.ballgown/i2t.ctab + - path: ./output/stringtie/test.ballgown/i2t.ctab md5sum: 8a117c8aa4334b4c2d4711932b006fb4 - - path: ./output/test_stringtie_forward/test.ballgown/e2t.ctab + - path: ./output/stringtie/test.ballgown/e2t.ctab md5sum: e981c0038295ae54b63cedb1083f1540 -- name: test_stringtie_reverse +- name: stringtie stringtie reverse command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_reverse -c tests/config/nextflow.config tags: - stringtie - stringtie/stringtie files: - - path: ./output/test_stringtie_reverse/test.transcripts.gtf - - path: ./output/test_stringtie_reverse/test.gene.abundance.txt + - path: ./output/stringtie/test.transcripts.gtf + - path: ./output/stringtie/test.gene.abundance.txt md5sum: 7385b870b955dae2c2ab78a70cf05cce - - path: ./output/test_stringtie_reverse/test.coverage.gtf + - path: ./output/stringtie/test.coverage.gtf md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: ./output/test_stringtie_reverse/test.ballgown/e_data.ctab + - path: ./output/stringtie/test.ballgown/e_data.ctab md5sum: 879b6696029d19c4737b562e9d149218 - - path: ./output/test_stringtie_reverse/test.ballgown/i_data.ctab + - path: ./output/stringtie/test.ballgown/i_data.ctab md5sum: be3abe09740603213f83d50dcf81427f - - path: ./output/test_stringtie_reverse/test.ballgown/t_data.ctab + - path: ./output/stringtie/test.ballgown/t_data.ctab md5sum: 3b66c065da73ae0dd41cc332eff6a818 - - path: ./output/test_stringtie_reverse/test.ballgown/i2t.ctab + - path: ./output/stringtie/test.ballgown/i2t.ctab md5sum: 8a117c8aa4334b4c2d4711932b006fb4 - - path: ./output/test_stringtie_reverse/test.ballgown/e2t.ctab + - path: ./output/stringtie/test.ballgown/e2t.ctab md5sum: e981c0038295ae54b63cedb1083f1540 From e971f538a99bfa190f36f9303bede205e00b90a4 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Tue, 28 Sep 2021 11:51:19 +0200 Subject: [PATCH 098/314] Module new version reporting fixes (#753) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Fix IQ tree * Fix picard markdup and merge sam * Fix plink/vcf version * Fix plink version output * Fix prokka version command * Fix pydamage * Try fixing markduplicates * Fix snpEff * Fix vcftools version * Fix pydamage and filtersamreads test run * Fix MarkDuplicates tests * Add missing unsorted checks * Remove MD5 sym due to stochasicity in BAM file --- modules/iqtree/main.nf | 2 +- modules/picard/markduplicates/main.nf | 2 +- modules/picard/mergesamfiles/main.nf | 4 ++-- modules/plink/vcf/main.nf | 2 +- modules/prokka/main.nf | 2 +- modules/pydamage/analyze/main.nf | 2 +- modules/pydamage/filter/main.nf | 2 +- modules/snpeff/main.nf | 2 +- modules/vcftools/main.nf | 2 +- tests/modules/iqtree/test.yml | 2 +- tests/modules/picard/filtersamreads/test.yml | 2 +- tests/modules/picard/markduplicates/main.nf | 3 ++- tests/modules/picard/markduplicates/test.yml | 10 +++++++--- tests/modules/picard/mergesamfiles/test.yml | 1 - tests/modules/pydamage/analyze/test.yml | 2 +- tests/modules/pydamage/filter/test.yml | 2 +- 16 files changed, 23 insertions(+), 19 deletions(-) diff --git a/modules/iqtree/main.nf b/modules/iqtree/main.nf index 28e07207..357faf33 100644 --- a/modules/iqtree/main.nf +++ b/modules/iqtree/main.nf @@ -41,7 +41,7 @@ process IQTREE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(iqtree -version 2>&1 | sed 's/^IQ-TREE multicore version \\([0-9\\.]*\\) .*\$/\\1/') + ${getSoftwareName(task.process)}: \$(echo \$(iqtree -version 2>&1) | sed 's/^IQ-TREE multicore version //;s/ .*//') END_VERSIONS """ } diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index 62cd10c2..dc8d460b 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -47,7 +47,7 @@ process PICARD_MARKDUPLICATES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard MarkDuplicates --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + ${getSoftwareName(task.process)}: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index e9cba284..c6ecfe58 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -45,7 +45,7 @@ process PICARD_MERGESAMFILES { OUTPUT=${prefix}.bam cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard MergeSamFiles --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + ${getSoftwareName(task.process)}: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } else { @@ -53,7 +53,7 @@ process PICARD_MERGESAMFILES { ln -s ${bam_files[0]} ${prefix}.bam cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard MergeSamFiles --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + ${getSoftwareName(task.process)}: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf index 697be55e..735fef88 100644 --- 
a/modules/plink/vcf/main.nf +++ b/modules/plink/vcf/main.nf @@ -41,7 +41,7 @@ process PLINK_VCF { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - plink: \$( plink --version 2>&1 | sed 's/^PLINK //' | sed 's/..-bit.*//' ) + plink: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) END_VERSIONS """ } diff --git a/modules/prokka/main.nf b/modules/prokka/main.nf index c2a9d682..8aefda7c 100644 --- a/modules/prokka/main.nf +++ b/modules/prokka/main.nf @@ -53,7 +53,7 @@ process PROKKA { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(prokka --version 2>&1 | sed 's/^.*prokka //') + ${getSoftwareName(task.process)}: \$(echo \$(prokka --version 2>&1) | sed 's/^.*prokka //') END_VERSIONS """ } diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index 042e6c74..df787e44 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -37,7 +37,7 @@ process PYDAMAGE_ANALYZE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pydamage --version 2>&1 | sed -e 's/pydamage, version //g') + ${getSoftwareName(task.process)}: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') END_VERSIONS """ } diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index 9cb95b4a..87677367 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -37,7 +37,7 @@ process PYDAMAGE_FILTER { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pydamage --version 2>&1 | sed -e 's/pydamage, version //g') + ${getSoftwareName(task.process)}: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') END_VERSIONS """ } diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index aa25a092..8b30360a 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -54,7 +54,7 @@ process SNPEFF { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(snpEff -version 2>&1) + ${getSoftwareName(task.process)}: \$(echo \$(snpEff -version 2>&1) | cut -f 2 -d ' ') END_VERSIONS """ } diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index 7ae1619f..a8d8969c 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -126,7 +126,7 @@ process VCFTOOLS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(vcftools --version 2>&1 | sed 's/^.*vcftools //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(vcftools --version 2>&1) | sed 's/^.*VCFtools (//;s/).*//') END_VERSIONS """ } diff --git a/tests/modules/iqtree/test.yml b/tests/modules/iqtree/test.yml index 7bacd0e6..e40656a2 100644 --- a/tests/modules/iqtree/test.yml +++ b/tests/modules/iqtree/test.yml @@ -1,4 +1,4 @@ -- name: iqtree +- name: iqtree test workflow command: nextflow run ./tests/modules/iqtree -entry test_iqtree -c tests/config/nextflow.config tags: - iqtree diff --git a/tests/modules/picard/filtersamreads/test.yml b/tests/modules/picard/filtersamreads/test.yml index 34dd85c4..e8e73ed0 100644 --- a/tests/modules/picard/filtersamreads/test.yml +++ b/tests/modules/picard/filtersamreads/test.yml @@ -8,7 +8,7 @@ md5sum: b44a6ca04811a9470c7813c3c9465fd5 -- name: picard filtersamreads_readlist +- name: picard filtersamreads readlist command: nextflow run 
./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c tests/config/nextflow.config tags: - picard diff --git a/tests/modules/picard/markduplicates/main.nf b/tests/modules/picard/markduplicates/main.nf index 2d4ff746..78643f8b 100644 --- a/tests/modules/picard/markduplicates/main.nf +++ b/tests/modules/picard/markduplicates/main.nf @@ -3,6 +3,7 @@ nextflow.enable.dsl = 2 include { PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [:] ) +include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [args : '--ASSUME_SORT_ORDER queryname' ] ) workflow test_picard_markduplicates_sorted_bam { input = [ [ id:'test', single_end:false ], // meta map @@ -17,5 +18,5 @@ workflow test_picard_markduplicates_unsorted_bam { file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] - PICARD_MARKDUPLICATES ( input ) + PICARD_MARKDUPLICATES_UNSORTED ( input ) } diff --git a/tests/modules/picard/markduplicates/test.yml b/tests/modules/picard/markduplicates/test.yml index ffb385f3..04075548 100644 --- a/tests/modules/picard/markduplicates/test.yml +++ b/tests/modules/picard/markduplicates/test.yml @@ -1,4 +1,4 @@ -- name: picard markduplicates on sorted bam +- name: picard markduplicates sorted bam command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_sorted_bam -c tests/config/nextflow.config tags: - picard @@ -7,9 +7,13 @@ - path: ./output/picard/test.MarkDuplicates.metrics.txt - path: ./output/picard/test.bam md5sum: b520ccdc3a9edf3c6a314983752881f2 -- name: picard markduplicates on unsorted bam +- name: picard markduplicates unsorted bam command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c tests/config/nextflow.config tags: - picard - picard/markduplicates - exit_code: 1 + files: + - path: ./output/picard/test.MarkDuplicates.metrics.txt + - path: ./output/picard/test.bam + md5sum: 46a6fc76048ba801d328f869ac9db020 + diff --git a/tests/modules/picard/mergesamfiles/test.yml b/tests/modules/picard/mergesamfiles/test.yml index 114c1f01..a331c96f 100644 --- a/tests/modules/picard/mergesamfiles/test.yml +++ b/tests/modules/picard/mergesamfiles/test.yml @@ -5,4 +5,3 @@ - picard/mergesamfiles files: - path: ./output/picard/test.bam - md5sum: 82bb91735aff82eae4f0b631114e9e15 diff --git a/tests/modules/pydamage/analyze/test.yml b/tests/modules/pydamage/analyze/test.yml index c54e64a4..e480c1b4 100644 --- a/tests/modules/pydamage/analyze/test.yml +++ b/tests/modules/pydamage/analyze/test.yml @@ -1,4 +1,4 @@ -- name: test_pydamage_analyze +- name: pydamage analyze test workflow command: nextflow run tests/modules/pydamage/analyze -entry test_pydamage -c tests/config/nextflow.config tags: - pydamage diff --git a/tests/modules/pydamage/filter/test.yml b/tests/modules/pydamage/filter/test.yml index e131d505..248be44b 100644 --- a/tests/modules/pydamage/filter/test.yml +++ b/tests/modules/pydamage/filter/test.yml @@ -1,4 +1,4 @@ -- name: test_pydamage_filter +- name: pydamage filter test workflow command: nextflow run tests/modules/pydamage/filter -entry test_pydamage -c tests/config/nextflow.config tags: - pydamage From 9c31cf1566fa4f8660ac3973e02fe0caebe86235 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 28 Sep 2021 14:37:47 +0100 Subject: [PATCH 099/314] Fix version commands: round 3 (#754) * Fix version commands: round 
3 * Fix seqkit/split2 modules --- modules/bbmap/align/main.nf | 2 + modules/bowtie/align/main.nf | 1 + modules/bowtie2/align/main.nf | 5 ++ modules/bwa/mem/main.nf | 1 + modules/bwa/sampe/main.nf | 1 + modules/bwa/samse/main.nf | 1 + modules/bwamem2/mem/main.nf | 1 + modules/chromap/index/main.nf | 9 ++- modules/hisat2/align/main.nf | 2 + modules/kraken2/kraken2/main.nf | 1 + modules/rapidnj/main.nf | 12 ++-- modules/rsem/calculateexpression/main.nf | 5 +- modules/rsem/preparereference/main.nf | 2 + modules/samtools/faidx/main.nf | 4 +- modules/seacr/callpeak/main.nf | 2 + modules/seqkit/split2/main.nf | 60 +++++++++---------- modules/seqtk/sample/main.nf | 4 +- modules/seqtk/subseq/main.nf | 2 +- modules/sequenzautils/bam2seqz/main.nf | 8 +-- modules/sequenzautils/gcwiggle/main.nf | 4 +- modules/shovill/main.nf | 2 +- modules/star/genomegenerate/main.nf | 4 ++ modules/tiddit/sv/main.nf | 5 +- modules/ucsc/bedgraphtobigwig/main.nf | 6 +- modules/ucsc/bigwigaverageoverbed/main.nf | 14 +++-- modules/unicycler/main.nf | 2 +- modules/yara/index/main.nf | 12 ++-- modules/yara/mapper/main.nf | 53 +++++++++------- tests/modules/seacr/callpeak/main.nf | 9 +-- tests/modules/seqkit/split2/test.yml | 42 ++++++------- tests/modules/sequenzautils/bam2seqz/test.yml | 2 +- .../modules/ucsc/bigwigaverageoverbed/main.nf | 7 ++- tests/modules/yara/index/main.nf | 4 +- tests/modules/yara/index/test.yml | 24 ++++---- 34 files changed, 184 insertions(+), 129 deletions(-) diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index 8235e78d..63989be0 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -57,6 +57,8 @@ process BBMAP_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(bbversion.sh) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ } diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 73554fa2..060c5fc4 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -58,6 +58,7 @@ process BOWTIE_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 7d33da03..00bcf83c 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -48,6 +48,8 @@ process BOWTIE2_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ } else { @@ -70,9 +72,12 @@ process BOWTIE2_ALIGN { if [ -f ${prefix}.unmapped.fastq.2.gz ]; then mv ${prefix}.unmapped.fastq.2.gz ${prefix}.unmapped_2.fastq.gz fi + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) 
END_VERSIONS """ } diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index 05e5260c..a081a69a 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -45,6 +45,7 @@ process BWA_MEM { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index ae2998d8..f4519541 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -44,6 +44,7 @@ process BWA_SAMPE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 89310153..5303b24f 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -44,6 +44,7 @@ process BWA_SAMSE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index f47bfea3..d21b8b99 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -46,6 +46,7 @@ process BWAMEM2_MEM { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index e52ffe4b..61b7a856 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -24,14 +24,16 @@ process CHROMAP_INDEX { path fasta output: - path "*.index" , emit: index - path "versions.yml" , emit: version + path "*.index" , emit: index + path "versions.yml", emit: version script: def software = getSoftwareName(task.process) def prefix = fasta.baseName """ - chromap -i $options.args \\ + chromap \\ + -i \\ + $options.args \\ -t $task.cpus \\ -r $fasta \\ -o ${prefix}.index @@ -39,6 +41,7 @@ process CHROMAP_INDEX { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo "$VERSION") + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 21eb3c7d..583ddc3f 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -62,6 +62,7 @@ process HISAT2_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo $VERSION) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } else { @@ -93,6 +94,7 @@ process HISAT2_ALIGN { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo $VERSION) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/kraken2/kraken2/main.nf 
b/modules/kraken2/kraken2/main.nf index 4000d12a..cc269e98 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -51,6 +51,7 @@ process KRAKEN2_KRAKEN2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ } diff --git a/modules/rapidnj/main.nf b/modules/rapidnj/main.nf index a46fbfe8..118ea7af 100644 --- a/modules/rapidnj/main.nf +++ b/modules/rapidnj/main.nf @@ -4,6 +4,8 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) +def VERSION = '2.3.2' // No version information printed + process RAPIDNJ { label 'process_medium' publishDir "${params.outdir}", @@ -21,9 +23,9 @@ process RAPIDNJ { path alignment output: - path "*.sth" , emit: stockholm_alignment - path "*.tre" , emit: phylogeny - path "versions.yml" , emit: version + path "*.sth" , emit: stockholm_alignment + path "*.tre" , emit: phylogeny + path "versions.yml", emit: version script: def software = getSoftwareName(task.process) @@ -38,10 +40,10 @@ process RAPIDNJ { -c $task.cpus \\ -x rapidnj_phylogeny.tre - # Doesn't appear to be a way of getting the version number cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo 2.3.2) + ${getSoftwareName(task.process)}: \$(echo $VERSION) + biopython: \$(python -c "import Bio; print(Bio.__version__)") END_VERSIONS """ } diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index 33f34904..d3d11397 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -34,8 +34,8 @@ process RSEM_CALCULATEEXPRESSION { tuple val(meta), path("${prefix}.transcript.bam"), optional:true, emit: bam_transcript script: - def software = getSoftwareName(task.process) - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def software = getSoftwareName(task.process) + prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -59,6 +59,7 @@ process RSEM_CALCULATEEXPRESSION { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ } diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index 560b5a63..b4a613bd 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -53,6 +53,7 @@ process RSEM_PREPAREREFERENCE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ } else { @@ -67,6 +68,7 @@ process RSEM_PREPAREREFERENCE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ } diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index f7d6cbef..cdbae99b 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -22,8 +22,8 @@ process SAMTOOLS_FAIDX { path fasta output: - path "*.fai" , emit: fai - path "versions.yml" , emit: version + path "*.fai" , emit: fai + path "versions.yml", emit: version script: def software = getSoftwareName(task.process) diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index cc567dfb..8892ab6d 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -40,6 +40,8 @@ process SEACR_CALLPEAK { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo $VERSION) + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") + r-base: \$(echo \$(R --version 2>&1) | sed 's/^.*R version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index 44e0046f..b178b1da 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -7,13 +7,11 @@ options = initOptions(params.options) process SEQKIT_SPLIT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::seqkit=0.16.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0" } else { @@ -24,42 +22,40 @@ process SEQKIT_SPLIT2 { tuple val(meta), path(reads) output: - tuple val(meta), path("*.split/*.gz"), emit: reads - path("versions.yml") , emit: version - + tuple val(meta), path("*${prefix}/*.gz"), emit: reads + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" if(meta.single_end){ - """ - seqkit \ - split2 \ - $options.args \ - --threads $task.cpus \ - -1 ${reads} \ - --out-dir ${prefix}.split + """ + seqkit \\ + split2 \\ + $options.args \\ + --threads $task.cpus \\ + -1 $reads \\ + --out-dir $prefix - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(seqkit --version 2>&1 | sed 's/^.*seqkit //; s/Using.*\$//') - END_VERSIONS - """ + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS + """ } else { - """ - seqkit \ - split2 \ - $options.args \ - --threads $task.cpus \ - -1 ${reads[0]} \ - -2 ${reads[1]} \ - --out-dir ${prefix}.split + """ + seqkit \\ + split2 \\ + $options.args \\ + --threads $task.cpus \\ + -1 ${reads[0]} \\ + -2 ${reads[1]} \\ + --out-dir $prefix - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(seqkit --version 2>&1 | sed 's/^.*seqkit //; s/Using.*\$//') - END_VERSIONS - """ + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS + """ } } diff --git a/modules/seqtk/sample/main.nf b/modules/seqtk/sample/main.nf index d62d8cac..277d74ca 100644 --- a/modules/seqtk/sample/main.nf +++ b/modules/seqtk/sample/main.nf @@ -40,7 +40,7 @@ process SEQTK_SAMPLE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(seqtk 2>&1 | sed 's/^.*Version: //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { @@ -64,7 +64,7 @@ process SEQTK_SAMPLE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(seqtk 2>&1 | sed 's/^.*Version: //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqtk/subseq/main.nf b/modules/seqtk/subseq/main.nf index 2907d282..41326402 100644 --- a/modules/seqtk/subseq/main.nf +++ b/modules/seqtk/subseq/main.nf @@ -43,7 +43,7 @@ process SEQTK_SUBSEQ { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(seqtk 2>&1 | sed 's/^.*Version: //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/sequenzautils/bam2seqz/main.nf b/modules/sequenzautils/bam2seqz/main.nf index ad4f6847..9c4fc12f 100644 --- a/modules/sequenzautils/bam2seqz/main.nf +++ b/modules/sequenzautils/bam2seqz/main.nf @@ -24,8 +24,8 @@ process SEQUENZAUTILS_BAM2SEQZ { path wigfile output: - tuple val(meta), path("*.seqz.gz"), emit: seqz - path "versions.yml" , emit: version + tuple val(meta), path("*.gz"), emit: seqz + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -38,11 +38,11 @@ process SEQUENZAUTILS_BAM2SEQZ { -t $tumourbam \\ --fasta $fasta \\ -gc $wigfile \\ - -o ${prefix}.seqz.gz + -o ${prefix}.gz cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(sequenzautils --version 2>&1 | sed 's/^.*sequenzautils //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is 
version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/sequenzautils/gcwiggle/main.nf b/modules/sequenzautils/gcwiggle/main.nf index a0575d7e..a352256a 100644 --- a/modules/sequenzautils/gcwiggle/main.nf +++ b/modules/sequenzautils/gcwiggle/main.nf @@ -23,7 +23,7 @@ process SEQUENZAUTILS_GCWIGGLE { output: tuple val(meta), path("*.wig.gz"), emit: wig - path "versions.yml" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) @@ -37,7 +37,7 @@ process SEQUENZAUTILS_GCWIGGLE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(sequenzautils --version 2>&1 | sed 's/^.*sequenzautils //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/shovill/main.nf b/modules/shovill/main.nf index 8319e75f..92b10732 100644 --- a/modules/shovill/main.nf +++ b/modules/shovill/main.nf @@ -44,7 +44,7 @@ process SHOVILL { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(shovill --version 2>&1 | sed 's/^.*shovill //' ) + ${getSoftwareName(task.process)}: \$(echo \$(shovill --version 2>&1) | sed 's/^.*shovill //') END_VERSIONS """ } diff --git a/modules/star/genomegenerate/main.nf b/modules/star/genomegenerate/main.nf index 5ccb38e8..520f6b21 100644 --- a/modules/star/genomegenerate/main.nf +++ b/modules/star/genomegenerate/main.nf @@ -46,6 +46,8 @@ process STAR_GENOMEGENERATE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//') END_VERSIONS """ } else { @@ -67,6 +69,8 @@ process STAR_GENOMEGENERATE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//') END_VERSIONS """ } diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index b7fe1b03..fce5c49b 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -35,14 +35,15 @@ process TIDDIT_SV { def reference = fasta == "dummy_file.txt" ? "--ref $fasta" : "" """ tiddit \\ - --sv $options.args \\ + --sv \\ + $options.args \\ --bam $bam \\ $reference \\ -o $prefix cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(tiddit -h 2>&1 | sed 's/^.*Version: //; s/(.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ucsc/bedgraphtobigwig/main.nf b/modules/ucsc/bedgraphtobigwig/main.nf index 60e046f9..4a779644 100644 --- a/modules/ucsc/bedgraphtobigwig/main.nf +++ b/modules/ucsc/bedgraphtobigwig/main.nf @@ -32,7 +32,11 @@ process UCSC_BEDGRAPHTOBIGWIG { def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ - bedGraphToBigWig $bedgraph $sizes ${prefix}.bigWig + bedGraphToBigWig \\ + $bedgraph \\ + $sizes \\ + ${prefix}.bigWig + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo $VERSION) diff --git a/modules/ucsc/bigwigaverageoverbed/main.nf b/modules/ucsc/bigwigaverageoverbed/main.nf index adba3c76..76c00cfe 100644 --- a/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/modules/ucsc/bigwigaverageoverbed/main.nf @@ -4,6 +4,8 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) +def VERSION = '377' + process UCSC_BIGWIGAVERAGEOVERBED { tag "$meta.id" label 'process_medium' @@ -23,19 +25,23 @@ process UCSC_BIGWIGAVERAGEOVERBED { path bigwig output: - tuple val(meta), path("*.tab") , emit: tab - path "versions.yml" , emit: version + tuple val(meta), path("*.tab"), emit: tab + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ # there is a bug that bigWigAverageOverBed can not handle ensembl seqlevels style. - bigWigAverageOverBed ${options.args} $bigwig $bed ${bed.getSimpleName()}.tab + bigWigAverageOverBed \\ + $options.args \\ + $bigwig \\ + $bed \\ + ${prefix}.tab cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bigWigAverageOverBed 2>&1 | sed 's/bigWigAverageOverBed v//; s/ - Compute.*\$//') + ${getSoftwareName(task.process)}: \$(echo $VERSION) END_VERSIONS """ } diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 2b031c42..1dd97c40 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -44,7 +44,7 @@ process UNICYCLER { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(unicycler --version 2>&1 | sed 's/^.*Unicycler v//; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(unicycler --version 2>&1) | sed 's/^.*Unicycler v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/yara/index/main.nf b/modules/yara/index/main.nf index c621e866..e99d99ba 100644 --- a/modules/yara/index/main.nf +++ b/modules/yara/index/main.nf @@ -22,21 +22,25 @@ process YARA_INDEX { path fasta output: - path "yara", emit: index - path "versions.yml" , emit: version + path "yara" , emit: index + path "versions.yml", emit: version script: def software = getSoftwareName(task.process) """ mkdir yara - yara_indexer $fasta -o "yara" + + yara_indexer \\ + $fasta \\ + -o "yara" + mv *.{lf,rid,sa,txt}.* yara cp $fasta yara/yara.fasta cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(yara_indexer --help 2>&1 | grep -e "yara_indexer version:" | sed 's/yara_indexer version: //g') + ${getSoftwareName(task.process)}: \$(echo \$(yara_indexer --version 2>&1) | sed 's/^.*yara_indexer version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 3404d591..88e3d411 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -24,31 +24,44 @@ process YARA_MAPPER { output: tuple val(meta), path("*.mapped.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + if (meta.single_end) { + """ + yara_mapper \\ + $options.args \\ + -t $task.cpus \\ + -f bam \\ + ${index}/yara \\ + $reads | samtools view -@ $task.cpus -hb -F4 > ${prefix}.mapped.bam - if(meta.single_end) { - """ - yara_mapper $options.args -t ${task.cpus} -f bam ${index}/yara $reads | samtools view -@ ${task.cpus} -hb -F4 > ${prefix}.mapped.bam - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(yara_mapper --help 2>&1) - END_VERSIONS - """ + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ } else { - """ - yara_mapper $options.args -t ${task.cpus} -f bam ${index}/yara ${reads[0]} ${reads[1]} > output.bam - samtools view -@ ${task.cpus} -hF 4 -f 0x40 -b output.bam > ${prefix}_1.mapped.bam - samtools view -@ ${task.cpus} -hF 4 -f 0x80 -b output.bam > ${prefix}_2.mapped.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(yara_mapper --version 2>&1 | grep -e "yara_mapper version:" | sed 's/yara_mapper version: //g') - END_VERSIONS - """ - } + """ + yara_mapper \\ + $options.args \\ + -t ${task.cpus} \\ + -f bam \\ + ${index}/yara \\ + ${reads[0]} \\ + ${reads[1]} > output.bam + samtools view -@ $task.cpus -hF 4 -f 0x40 -b output.bam > ${prefix}_1.mapped.bam + samtools view -@ $task.cpus -hF 4 -f 0x80 -b output.bam > ${prefix}_2.mapped.bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ + } } diff --git a/tests/modules/seacr/callpeak/main.nf b/tests/modules/seacr/callpeak/main.nf index 82fd6eb3..7e9cef8a 100644 --- a/tests/modules/seacr/callpeak/main.nf +++ b/tests/modules/seacr/callpeak/main.nf @@ -5,10 +5,11 @@ nextflow.enable.dsl = 2 include { SEACR_CALLPEAK } from '../../../../modules/seacr/callpeak/main.nf' addParams( options: [ args:'norm stringent' ] ) workflow test_seacr_callpeak { - input = [ [ id:'test_1'], - file("${launchDir}/tests/data/generic/bedgraph/K27me3_1_to_chr20.bedgraph", checkIfExists: true), - file("${launchDir}/tests/data/generic/bedgraph/IgG_1_to_chr20.bedgraph", checkIfExists: true) - ] + input = [ + [ id:'test_1'], + file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/bedgraph/K27me3_1_to_chr20.bedgraph", checkIfExists: true), + file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/bedgraph/IgG_1_to_chr20.bedgraph", checkIfExists: true) + ] SEACR_CALLPEAK ( input ) } diff --git a/tests/modules/seqkit/split2/test.yml b/tests/modules/seqkit/split2/test.yml index b3e0e020..13f3b003 100644 --- a/tests/modules/seqkit/split2/test.yml +++ b/tests/modules/seqkit/split2/test.yml @@ -4,9 +4,9 @@ - seqkit - seqkit/split2 files: - - path: output/seqkit/test.split/test_1.part_001.fastq.gz + - path: output/seqkit/test/test_1.part_001.fastq.gz md5sum: 6f7d58ba35c254c0817fe9a7c69862e4 - - path: output/seqkit/test.split/test_1.part_002.fastq.gz + - path: output/seqkit/test/test_1.part_002.fastq.gz md5sum: cf38c51506e45380fe25abdd1bd5ccc6 - 
name: seqkit split2 single-end size @@ -15,9 +15,9 @@ - seqkit - seqkit/split2 files: - - path: output/seqkit/test.split/test_1.part_001.fastq.gz + - path: output/seqkit/test/test_1.part_001.fastq.gz md5sum: bf835e685d597fc1ab5e5ac7dd689619 - - path: output/seqkit/test.split/test_1.part_002.fastq.gz + - path: output/seqkit/test/test_1.part_002.fastq.gz md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 - name: seqkit split2 single-end part @@ -26,11 +26,11 @@ - seqkit - seqkit/split2 files: - - path: output/seqkit/test.split/test_1.part_001.fastq.gz + - path: output/seqkit/test/test_1.part_001.fastq.gz md5sum: fa25951435471238d5567fd2cae31f55 - - path: output/seqkit/test.split/test_1.part_002.fastq.gz + - path: output/seqkit/test/test_1.part_002.fastq.gz md5sum: 1dcf631aaaa5e7e0bd6c9668fbc6e04a - - path: output/seqkit/test.split/test_1.part_003.fastq.gz + - path: output/seqkit/test/test_1.part_003.fastq.gz md5sum: 8bc86ba83a611c54f592f4eae19b680f - name: seqkit split2 paired-end length @@ -39,13 +39,13 @@ - seqkit - seqkit/split2 files: - - path: output/seqkit/test.split/test_1.part_001.fastq.gz + - path: output/seqkit/test/test_1.part_001.fastq.gz md5sum: 6f7d58ba35c254c0817fe9a7c69862e4 - - path: output/seqkit/test.split/test_1.part_002.fastq.gz + - path: output/seqkit/test/test_1.part_002.fastq.gz md5sum: cf38c51506e45380fe25abdd1bd5ccc6 - - path: output/seqkit/test.split/test_2.part_001.fastq.gz + - path: output/seqkit/test/test_2.part_001.fastq.gz md5sum: 6b094b1ba7c439fe44c1bb5e99a02ba4 - - path: output/seqkit/test.split/test_2.part_002.fastq.gz + - path: output/seqkit/test/test_2.part_002.fastq.gz md5sum: 927097c6ac7522199a9e016333181a8e - name: seqkit split2 paired-end size @@ -54,13 +54,13 @@ - seqkit - seqkit/split2 files: - - path: output/seqkit/test.split/test_1.part_001.fastq.gz + - path: output/seqkit/test/test_1.part_001.fastq.gz md5sum: bf835e685d597fc1ab5e5ac7dd689619 - - path: output/seqkit/test.split/test_1.part_002.fastq.gz + - path: output/seqkit/test/test_1.part_002.fastq.gz md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 - - path: output/seqkit/test.split/test_2.part_001.fastq.gz + - path: output/seqkit/test/test_2.part_001.fastq.gz md5sum: 09d0dd83b5b1b9b95d316eeed79ea5ba - - path: output/seqkit/test.split/test_2.part_002.fastq.gz + - path: output/seqkit/test/test_2.part_002.fastq.gz md5sum: 8796c3f327b1094244bfcdb36d536526 - name: seqkit split2 paired-end part @@ -69,15 +69,15 @@ - seqkit - seqkit/split2 files: - - path: output/seqkit/test.split/test_1.part_001.fastq.gz + - path: output/seqkit/test/test_1.part_001.fastq.gz md5sum: fa25951435471238d5567fd2cae31f55 - - path: output/seqkit/test.split/test_1.part_002.fastq.gz + - path: output/seqkit/test/test_1.part_002.fastq.gz md5sum: 1dcf631aaaa5e7e0bd6c9668fbc6e04a - - path: output/seqkit/test.split/test_1.part_003.fastq.gz + - path: output/seqkit/test/test_1.part_003.fastq.gz md5sum: 8bc86ba83a611c54f592f4eae19b680f - - path: output/seqkit/test.split/test_2.part_001.fastq.gz + - path: output/seqkit/test/test_2.part_001.fastq.gz md5sum: f0055c99cd193fd97466b3cde9dd1b8f - - path: output/seqkit/test.split/test_2.part_002.fastq.gz + - path: output/seqkit/test/test_2.part_002.fastq.gz md5sum: 8a90df768201785f7a7cd5dbb41e846a - - path: output/seqkit/test.split/test_2.part_003.fastq.gz + - path: output/seqkit/test/test_2.part_003.fastq.gz md5sum: 890b90083e8e1606bd13ba34149cedd7 diff --git a/tests/modules/sequenzautils/bam2seqz/test.yml b/tests/modules/sequenzautils/bam2seqz/test.yml index d8a21430..0b9cac53 100644 --- 
a/tests/modules/sequenzautils/bam2seqz/test.yml +++ b/tests/modules/sequenzautils/bam2seqz/test.yml @@ -4,5 +4,5 @@ - sequenzautils - sequenzautils/bam2seqz files: - - path: output/sequenzautils/test.seqz.gz + - path: output/sequenzautils/test.gz md5sum: 12b41979a498ac10c0aff162b12e6a6e diff --git a/tests/modules/ucsc/bigwigaverageoverbed/main.nf b/tests/modules/ucsc/bigwigaverageoverbed/main.nf index 88310a0b..9bd5a5e2 100644 --- a/tests/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/tests/modules/ucsc/bigwigaverageoverbed/main.nf @@ -5,9 +5,10 @@ nextflow.enable.dsl = 2 include { UCSC_BIGWIGAVERAGEOVERBED } from '../../../../modules/ucsc/bigwigaverageoverbed/main.nf' addParams( options: [:] ) workflow test_ucsc_bigwigaverageoverbed { - input = [ [ id: 'test' ], // meta map - [ file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true ) ] - ] + input = [ + [ id: 'test' ], // meta map + [ file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true ) ] + ] bigwig = file(params.test_data['sarscov2']['illumina']['test_bigwig'], checkIfExists: true) UCSC_BIGWIGAVERAGEOVERBED ( input, bigwig ) diff --git a/tests/modules/yara/index/main.nf b/tests/modules/yara/index/main.nf index dcedc61b..35a86182 100644 --- a/tests/modules/yara/index/main.nf +++ b/tests/modules/yara/index/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams( options: [:] ) +include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams( options: [publish_dir:'yara'] ) workflow test_yara_index { - def input = file("${launchDir}/tests/data/genomics/sarscov2/genome/genome.fasta", checkIfExists: true) + input = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) YARA_INDEX ( input ) } diff --git a/tests/modules/yara/index/test.yml b/tests/modules/yara/index/test.yml index 384aeee8..de6f1cf6 100644 --- a/tests/modules/yara/index/test.yml +++ b/tests/modules/yara/index/test.yml @@ -4,27 +4,27 @@ - yara/index - yara files: - - path: output/index/yara/yara.rid.limits + - path: output/yara/yara/yara.rid.limits md5sum: 8b814661f30a0c9e350bfbcb454930ce - - path: output/index/yara/yara.sa.len + - path: output/yara/yara/yara.sa.len md5sum: 45677f66c28c79c02250ceb8b58645e8 - - path: output/index/yara/yara.sa.ind + - path: output/yara/yara/yara.sa.ind md5sum: 464314583efb5f07260b0efecc29a1ce - - path: output/index/yara/yara.lf.drp + - path: output/yara/yara/yara.lf.drp md5sum: 3ef99a87a4e44513f46d42f4261f7842 - - path: output/index/yara/yara.txt.size + - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - - path: output/index/yara/yara.rid.concat + - path: output/yara/yara/yara.rid.concat md5sum: 1e4e4c88ddeaf907a12f02f0d88367c5 - - path: output/index/yara/yara.txt.concat + - path: output/yara/yara/yara.txt.concat md5sum: 6074d1933c9e7e5ab05fa0def5ce28c0 - - path: output/index/yara/yara.lf.drs + - path: output/yara/yara/yara.lf.drs md5sum: 55a54008ad1ba589aa210d2629c1df41 - - path: output/index/yara/yara.txt.limits + - path: output/yara/yara/yara.txt.limits md5sum: 4480a068db603e4c9a27bc4fa9ceaf14 - - path: output/index/yara/yara.sa.val + - path: output/yara/yara/yara.sa.val md5sum: ce57cc82e2d3ae7b9824210f54168ce9 - - path: output/index/yara/yara.lf.pst + - path: output/yara/yara/yara.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/yara/yara.lf.drv + - path: output/yara/yara/yara.lf.drv md5sum: cf6408307fe9fd7f99c33f521bf95550 From 
512f5dfc27397cae436d9fc2e82aa055fc89d31a Mon Sep 17 00:00:00 2001 From: Gregor Sturm Date: Tue, 28 Sep 2021 16:51:35 +0200 Subject: [PATCH 100/314] Better error message for FileNotFoundErrors (#755) * Better error message for FileNotFoundErrors * Update tests/test_versions_yml.py Co-authored-by: James A. Fellows Yates * Update test_versions_yml.py Co-authored-by: James A. Fellows Yates --- tests/test_versions_yml.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/tests/test_versions_yml.py b/tests/test_versions_yml.py index c3944a0f..b6392b87 100644 --- a/tests/test_versions_yml.py +++ b/tests/test_versions_yml.py @@ -2,6 +2,7 @@ from pathlib import Path import pytest import yaml import re +from textwrap import dedent def _get_workflow_names(): @@ -21,7 +22,22 @@ def _get_workflow_names(): def test_ensure_valid_version_yml(workflow_dir): workflow_dir = Path(workflow_dir) software_name = workflow_dir.name.split("_")[0].lower() - versions_yml = (workflow_dir / f"output/{software_name}/versions.yml").read_text() + try: + versions_yml_file = workflow_dir / f"output/{software_name}/versions.yml" + versions_yml = versions_yml_file.read_text() + except FileNotFoundError: + raise AssertionError( + dedent( + f"""\ + `versions.yml` not found in the output directory. + Expected path: `{versions_yml_file}` + + This can have multiple reasons: + * The test-workflow failed before a `versions.yml` could be generated. + * The workflow name in `test.yml` does not start with the tool name. + """ + ) + ) assert ( "END_VERSIONS" not in versions_yml @@ -29,7 +45,9 @@ def test_ensure_valid_version_yml(workflow_dir): # Raises an exception if yaml is not valid versions = yaml.safe_load(versions_yml) - assert len(versions) == 1, "The top-level of versions.yml must contain exactely one entry: the process name as dict key" + assert ( + len(versions) == 1 + ), "The top-level of versions.yml must contain exactly one entry: the process name as dict key" software_versions = next(iter(versions.values())) assert len(software_versions), "There must be at least one version emitted." 
for tool, version in software_versions.items(): From cde1d827f993f71ac5e07de4381d80f81d4f813e Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Tue, 28 Sep 2021 18:33:07 +0200 Subject: [PATCH 101/314] Fix yml ver (#756) * Fix kallisto index tests * Fix nanoplot * Fix kallistobustools * Fix pairix * Fix plasmidid * Fix pbccs * Fix raxmlng * Fix prokka * Fix shovill * Fix typo * Deleted workflow by mistake added again --- modules/kallisto/index/main.nf | 2 +- modules/kallisto/index/meta.yml | 4 ++-- modules/kallistobustools/count/main.nf | 2 +- modules/kallistobustools/ref/main.nf | 4 ++-- modules/nanoplot/main.nf | 2 +- modules/pairix/main.nf | 2 +- modules/pbccs/main.nf | 2 +- modules/plasmidid/main.nf | 2 +- modules/raxmlng/main.nf | 2 +- tests/modules/kallisto/index/main.nf | 2 +- tests/modules/kallistobustools/count/main.nf | 2 +- tests/modules/kallistobustools/ref/main.nf | 1 + tests/modules/plasmidid/test.yml | 2 -- 13 files changed, 14 insertions(+), 15 deletions(-) diff --git a/modules/kallisto/index/main.nf b/modules/kallisto/index/main.nf index 00ae9601..801f339e 100644 --- a/modules/kallisto/index/main.nf +++ b/modules/kallisto/index/main.nf @@ -36,7 +36,7 @@ process KALLISTO_INDEX { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(kallisto 2>&1 | sed 's/^kallisto //; s/Usage.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') END_VERSIONS """ } diff --git a/modules/kallisto/index/meta.yml b/modules/kallisto/index/meta.yml index ba4855b0..6080eb77 100644 --- a/modules/kallisto/index/meta.yml +++ b/modules/kallisto/index/meta.yml @@ -1,7 +1,7 @@ name: kallisto_index -description: write your description here +description: Create kallisto index keywords: - - sort + - index tools: - kallisto: description: Quantifying abundances of transcripts from bulk and single-cell RNA-Seq data, or more generally of target sequences using high-throughput sequencing reads. 
diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index 309bd57c..b0dd3a06 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -53,7 +53,7 @@ process KALLISTOBUSTOOLS_COUNT { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(kb 2>&1 | sed 's/^.*kb_python //;s/positional arguments.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/ref/main.nf b/modules/kallistobustools/ref/main.nf index bc9b32f5..c8e02687 100644 --- a/modules/kallistobustools/ref/main.nf +++ b/modules/kallistobustools/ref/main.nf @@ -47,7 +47,7 @@ process KALLISTOBUSTOOLS_REF { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(kb 2>&1 | sed 's/^.*kb_python //;s/positional arguments.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } else { @@ -66,7 +66,7 @@ process KALLISTOBUSTOOLS_REF { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(kb 2>&1 | sed 's/^.*kb_python //;s/positional arguments.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } diff --git a/modules/nanoplot/main.nf b/modules/nanoplot/main.nf index 86b300f5..e36b2da2 100644 --- a/modules/nanoplot/main.nf +++ b/modules/nanoplot/main.nf @@ -39,7 +39,7 @@ process NANOPLOT { $input_file cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(NanoPlot --version 2>&1 | sed 's/^.*NanoPlot //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(NanoPlot --version 2>&1) | sed 's/^.*NanoPlot //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/pairix/main.nf b/modules/pairix/main.nf index 684ea7e6..c00af657 100644 --- a/modules/pairix/main.nf +++ b/modules/pairix/main.nf @@ -34,7 +34,7 @@ process PAIRIX { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairix --help 2>&1 | sed 's/^.*Version: //; s/Usage.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(pairix --help 2>&1) | sed 's/^.*Version: //; s/Usage.*\$//') END_VERSIONS """ } diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index ccf17cc4..5df852cf 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -51,7 +51,7 @@ process PBCCS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(ccs --version 2>&1 | grep -e 'commit') + ${getSoftwareName(task.process)}: \$(echo \$(ccs --version 2>&1) | grep 'ccs' | sed 's/^.*ccs //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/plasmidid/main.nf b/modules/plasmidid/main.nf index 8be58c57..792b3c12 100644 --- a/modules/plasmidid/main.nf +++ b/modules/plasmidid/main.nf @@ -47,7 +47,7 @@ process PLASMIDID { mv NO_GROUP/$prefix ./$prefix cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plasmidID --version 2>&1) + ${getSoftwareName(task.process)}: \$(echo \$(plasmidID --version 2>&1)) END_VERSIONS """ } diff --git a/modules/raxmlng/main.nf b/modules/raxmlng/main.nf index 7094eaa7..e3bde2f3 100644 --- a/modules/raxmlng/main.nf +++ 
b/modules/raxmlng/main.nf @@ -36,7 +36,7 @@ process RAXMLNG { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(raxml-ng --version 2>&1 | sed 's/^.*RAxML-NG v. //; s/released.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(raxml-ng --version 2>&1) | sed 's/^.*RAxML-NG v. //; s/released.*\$//') END_VERSIONS """ } diff --git a/tests/modules/kallisto/index/main.nf b/tests/modules/kallisto/index/main.nf index bab78f51..7c6078f8 100644 --- a/tests/modules/kallisto/index/main.nf +++ b/tests/modules/kallisto/index/main.nf @@ -7,7 +7,7 @@ include { KALLISTO_INDEX } from '../../../../modules/kallisto/index/main.nf' add workflow test_kallisto_index { def input = [] - input = file("${launchDir}/tests/data/genomics/sarscov2/genome/genome.fasta", checkIfExists: true) + input = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) KALLISTO_INDEX ( input ) } diff --git a/tests/modules/kallistobustools/count/main.nf b/tests/modules/kallistobustools/count/main.nf index 4400976a..9172ddfc 100644 --- a/tests/modules/kallistobustools/count/main.nf +++ b/tests/modules/kallistobustools/count/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' addParams( options: [args:"--cellranger"] ) +include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' addParams( options: [args:"--cellranger -m 1"] ) workflow test_kallistobustools_count { diff --git a/tests/modules/kallistobustools/ref/main.nf b/tests/modules/kallistobustools/ref/main.nf index 1ecfa339..31b36d0d 100644 --- a/tests/modules/kallistobustools/ref/main.nf +++ b/tests/modules/kallistobustools/ref/main.nf @@ -30,3 +30,4 @@ workflow test_kallistobustools_ref_nucleus { KALLISTOBUSTOOLS_REF( fasta, gtf, workflow) } + diff --git a/tests/modules/plasmidid/test.yml b/tests/modules/plasmidid/test.yml index dd472a72..838af394 100644 --- a/tests/modules/plasmidid/test.yml +++ b/tests/modules/plasmidid/test.yml @@ -12,7 +12,6 @@ - path: output/plasmidid/test/data/test.fna md5sum: 503a5e1d4654bb2df19420e211070db3 - path: output/plasmidid/test/data/test.gbk - md5sum: c851bba9da6ec72cce591617067df50b - path: output/plasmidid/test/data/test.gff md5sum: 3ed8912ee9b0712ca491fa78ff5f4da1 - path: output/plasmidid/test/data/test.karyotype_individual.txt @@ -28,7 +27,6 @@ - path: output/plasmidid/test/database/test.fna md5sum: 6b843fe652b4369addb382f61952c3dd - path: output/plasmidid/test/database/test.gbk - md5sum: 1f7972ecbb868823727157d2c482700d - path: output/plasmidid/test/database/test.gff md5sum: 7e65da147d0a413020b0d92b7b03ffcd - path: output/plasmidid/test/fasta_files/MT192765.1_term.fasta From c3687ef54bc0f651ea332c228317baf34c643c18 Mon Sep 17 00:00:00 2001 From: Gregor Sturm Date: Tue, 28 Sep 2021 21:25:10 +0200 Subject: [PATCH 102/314] fix more ver yml (#757) * Fix featurecounts * whitespace change to trigger CI on yara/mapper * update test yaml --- modules/subread/featurecounts/main.nf | 2 +- tests/modules/subread/featurecounts/test.yml | 18 +++++++++--------- tests/modules/yara/mapper/main.nf | 4 ++-- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/modules/subread/featurecounts/main.nf b/modules/subread/featurecounts/main.nf index 3e2eb765..76209a0d 100644 --- a/modules/subread/featurecounts/main.nf +++ b/modules/subread/featurecounts/main.nf @@ -49,7 +49,7 @@ process SUBREAD_FEATURECOUNTS { cat <<-END_VERSIONS > versions.yml 
${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(featureCounts -v 2>&1 | sed -e "s/featureCounts v//g") + ${getSoftwareName(task.process)}: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") END_VERSIONS """ } diff --git a/tests/modules/subread/featurecounts/test.yml b/tests/modules/subread/featurecounts/test.yml index 2cba9e15..be6bed47 100644 --- a/tests/modules/subread/featurecounts/test.yml +++ b/tests/modules/subread/featurecounts/test.yml @@ -4,10 +4,10 @@ - subread - subread/featurecounts files: - - path: output/subread/test.featureCounts.txt.summary - md5sum: d78617192451a57f6ef249ddcaf13720 - path: output/subread/test.featureCounts.txt - md5sum: b0a1f7563afe49007f422d4c9ca5ee6c + md5sum: 0012df4c0a0e47eec1440017ab34f75f + - path: output/subread/test.featureCounts.txt.summary + md5sum: 8f602ff9a8ef467af43294e80b367cdf - name: subread featurecounts test_subread_featurecounts_reverse command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_reverse -c tests/config/nextflow.config @@ -15,10 +15,10 @@ - subread - subread/featurecounts files: - - path: output/subread/test.featureCounts.txt.summary - md5sum: 4217004d0b55f870f77092364f59e44d - path: output/subread/test.featureCounts.txt - md5sum: 412840a8880cd29674b3d5404d3de19b + md5sum: 8175816b8260ed444d59232bd7e7120b + - path: output/subread/test.featureCounts.txt.summary + md5sum: 7cfa30ad678b9bc1bc63afbb0281547b - name: subread featurecounts test_subread_featurecounts_unstranded command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_unstranded -c tests/config/nextflow.config @@ -26,7 +26,7 @@ - subread - subread/featurecounts files: - - path: output/subread/test.featureCounts.txt.summary - md5sum: ee585faeb1edfcd2188a5e486a0e98a9 - path: output/subread/test.featureCounts.txt - md5sum: 6b684e11a1e54bec7e1ee5e3f651d7fd + md5sum: 3307d31b44a5d6bb3389786bb8f4e91f + - path: output/subread/test.featureCounts.txt.summary + md5sum: 23164b79f9f23f11c82820db61a35560 diff --git a/tests/modules/yara/mapper/main.nf b/tests/modules/yara/mapper/main.nf index 06582eb3..9cdce40d 100644 --- a/tests/modules/yara/mapper/main.nf +++ b/tests/modules/yara/mapper/main.nf @@ -3,8 +3,8 @@ nextflow.enable.dsl = 2 -include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams( options: ['args': '-e 3'] ) -include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' addParams( options: ['args': '-e 3'] ) +include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams(options: ['args': '-e 3']) +include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' addParams(options: ['args': '-e 3']) workflow test_yara_single_end { From 5c463ca6b46b0a452253f5ae5ce7b8253674cff0 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 28 Sep 2021 13:42:04 -0600 Subject: [PATCH 103/314] few version and test data config fixes (#758) * update kleborate version info * fix stderr capture * few more version fixes * fix version info on more tools --- modules/chromap/chromap/main.nf | 13 +++++-------- modules/delly/call/main.nf | 2 +- modules/ensemblvep/main.nf | 2 +- modules/expansionhunter/main.nf | 2 +- modules/fgbio/callmolecularconsensusreads/main.nf | 3 ++- modules/fgbio/sortbam/main.nf | 2 +- modules/glnexus/main.nf | 3 ++- modules/kleborate/main.nf | 2 +- modules/minia/main.nf | 5 +++-- tests/config/test_data.config | 1 + tests/modules/delly/call/test.yml | 2 +- tests/modules/hifiasm/main.nf | 4 ++-- 12 files changed, 21 insertions(+), 20 deletions(-) diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index 00aae27e..c7b0a5a3 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -41,9 +41,7 @@ process CHROMAP_CHROMAP { def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def args = options.args.tokenize() - def file_extension = options.args.contains("--SAM")? 'sam' : - options.args.contains("--TagAlign")? 'tagAlign' : - options.args.contains("--pairs")? 'pairs' : 'bed' + def file_extension = options.args.contains("--SAM") ? 'sam' : options.args.contains("--TagAlign")? 'tagAlign' : options.args.contains("--pairs")? 'pairs' : 'bed' if (barcodes) { args << "-b ${barcodes.join(',')}" if (whitelist) { @@ -56,9 +54,8 @@ process CHROMAP_CHROMAP { if (pairs_chr_order){ args << "--pairs-natural-chr-order $pairs_chr_order" } - def compression_cmds = """ - gzip ${prefix}.${file_extension} - """ + def final_args = args.join(' ') + def compression_cmds = "gzip ${prefix}.${file_extension}" if (options.args.contains("--SAM")) { compression_cmds = """ samtools view $options.args2 -@ ${task.cpus} -bh \\ @@ -68,7 +65,7 @@ process CHROMAP_CHROMAP { } if (meta.single_end) { """ - chromap ${args.join(' ')} \\ + chromap ${final_args} \\ -t $task.cpus \\ -x $index \\ -r $fasta \\ @@ -84,7 +81,7 @@ process CHROMAP_CHROMAP { """ } else { """ - chromap ${args.join(' ')} \\ + chromap ${final_args} \\ -t $task.cpus \\ -x $index \\ -r $fasta \\ diff --git a/modules/delly/call/main.nf b/modules/delly/call/main.nf index f97ddeb0..0688949e 100644 --- a/modules/delly/call/main.nf +++ b/modules/delly/call/main.nf @@ -41,7 +41,7 @@ process DELLY_CALL { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(delly --version 2>&1 | sed 's/^.*Delly //; s/Using.*\$//') + ${getSoftwareName(task.process)}: \$( echo \$(delly --version 2>&1) | sed 's/^.*Delly version: v//; s/ using.*\$//') END_VERSIONS """ } diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 32acc4dd..17eaf720 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -59,7 +59,7 @@ process ENSEMBLVEP { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(vep --help 2>&1) + ${getSoftwareName(task.process)}: \$( echo \$(vep --help 2>&1) | sed 's/^.*Versions:.*ensembl-vep : //;s/ .*\$//') END_VERSIONS """ } diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 7ee97c5a..1c02f404 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -42,7 +42,7 @@ process EXPANSIONHUNTER { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: 
\$(ExpansionHunter --version 2>&1 | sed 's/^.*ExpansionHunter //') + ${getSoftwareName(task.process)}: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') END_VERSIONS """ } diff --git a/modules/fgbio/callmolecularconsensusreads/main.nf b/modules/fgbio/callmolecularconsensusreads/main.nf index ba099d8d..a3d047a7 100644 --- a/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/modules/fgbio/callmolecularconsensusreads/main.nf @@ -33,9 +33,10 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { -i $bam \\ $options.args \\ -o ${prefix}.bam + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fgbio --version | sed -e "s/fgbio v//g") + ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/sortbam/main.nf b/modules/fgbio/sortbam/main.nf index 81ac89c2..928765f5 100644 --- a/modules/fgbio/sortbam/main.nf +++ b/modules/fgbio/sortbam/main.nf @@ -35,7 +35,7 @@ process FGBIO_SORTBAM { -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fgbio --version | sed -e "s/fgbio v//g") + ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index 60f50932..5cff088b 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -44,9 +44,10 @@ process GLNEXUS { $options.args \\ ${input.join(' ')} \\ > ${prefix}.bcf + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(glnexus_cli 2>&1 | head -n 1 | sed 's/^.*release //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf index 0079071f..5a4be104 100644 --- a/modules/kleborate/main.nf +++ b/modules/kleborate/main.nf @@ -36,7 +36,7 @@ process KLEBORATE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(kleborate -v 2>&1 | sed 's/kleborate //;') + ${getSoftwareName(task.process)}: \$( echo \$(kleborate --version | sed 's/Kleborate v//;')) END_VERSIONS """ } diff --git a/modules/minia/main.nf b/modules/minia/main.nf index 518e8264..9ab344fd 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -30,8 +30,9 @@ process MINIA { script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def read_list = reads.join(",") """ - echo "${reads.join("\n")}" > input_files.txt + echo "${read_list}" | sed 's/,/\\n/g' > input_files.txt minia \\ $options.args \\ -nb-cores $task.cpus \\ @@ -40,7 +41,7 @@ process MINIA { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(minia --version 2>&1) | sed 's/^.*Minia version //; s/ .*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(minia --version 2>&1 | grep Minia) | sed 's/^.*Minia version //;') END_VERSIONS """ } diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 8b246c7c..8d5ecd92 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -180,6 +180,7 @@ params { ccs_fq = "${test_data_dir}/genomics/homo_sapiens/pacbio/fastq/alz.ccs.fastq" ccs_fq_gz = "${test_data_dir}/genomics/homo_sapiens/pacbio/fastq/alz.ccs.fastq.gz" ccs_xml = "${test_data_dir}/genomics/homo_sapiens/pacbio/xml/alz.ccs.consensusreadset.xml" + hifi = "${test_data_dir}/genomics/homo_sapiens/pacbio/fastq/test_hifi.fastq.gz" lima = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.bam" refine = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.bam" cluster = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.bam" diff --git a/tests/modules/delly/call/test.yml b/tests/modules/delly/call/test.yml index 8faeba78..d8750892 100644 --- a/tests/modules/delly/call/test.yml +++ b/tests/modules/delly/call/test.yml @@ -5,6 +5,6 @@ - delly/call files: - path: output/delly/test.bcf - md5sum: b20df3b9086faccd6bfd2641d97712c8 + md5sum: 360c1bf6867f33bd2a868ddfb4d957fc - path: output/delly/test.bcf.csi md5sum: 19e0cdf06c415f4942f6d4dbd5fb7271 diff --git a/tests/modules/hifiasm/main.nf b/tests/modules/hifiasm/main.nf index aeb64fb2..30614389 100644 --- a/tests/modules/hifiasm/main.nf +++ b/tests/modules/hifiasm/main.nf @@ -10,7 +10,7 @@ include { HIFIASM } from '../../../modules/hifiasm/main.nf' addParams( options: workflow test_hifiasm_hifi_only { input = [ [ id:'test' ], // meta map - [ file(params.test_data['homo_sapiens']['pacbio']['test_hifi_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['homo_sapiens']['pacbio']['hifi'], checkIfExists: true) ] ] HIFIASM ( input, [], [], false ) @@ -22,7 +22,7 @@ workflow test_hifiasm_hifi_only { workflow test_hifiasm_with_parental_reads { input = [ [ id:'test' ], // meta map - [ file(params.test_data['homo_sapiens']['pacbio']['test_hifi_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['homo_sapiens']['pacbio']['hifi'], checkIfExists: true) ] ] paternal_kmer_dump = file(params.test_data['homo_sapiens']['illumina']['test_yak'], checkIfExists: true) maternal_kmer_dump = file(params.test_data['homo_sapiens']['illumina']['test2_yak'], checkIfExists: true) From b932210f270a1fea1f36c181abfb064448572846 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 28 Sep 2021 21:14:11 +0100 Subject: [PATCH 104/314] Fix flash and cat/fastq modules (#759) * Fix version commands: round 3 * Fix seqkit/split2 modules * Fix flash and cat/fastq modules * Remove md5sums on gz files --- modules/cat/fastq/main.nf | 13 ++++++++++++- modules/flash/main.nf | 14 +++++++------- tests/modules/cat/fastq/main.nf | 24 +++++++++++++----------- tests/modules/cat/fastq/test.yml | 6 +++--- tests/modules/flash/main.nf | 9 +++++---- tests/modules/flash/test.yml | 9 
+++------ 6 files changed, 43 insertions(+), 32 deletions(-) diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index 55ccca90..712364e1 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,6 +23,7 @@ process CAT_FASTQ { output: tuple val(meta), path("*.merged.fastq.gz"), emit: reads + path "versions.yml" , emit: version script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" @@ -31,6 +32,11 @@ process CAT_FASTQ { if (readList.size > 1) { """ cat ${readList.sort().join(' ')} > ${prefix}.merged.fastq.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') + END_VERSIONS """ } } else { @@ -41,6 +47,11 @@ process CAT_FASTQ { """ cat ${read1.sort().join(' ')} > ${prefix}_1.merged.fastq.gz cat ${read2.sort().join(' ')} > ${prefix}_2.merged.fastq.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') + END_VERSIONS """ } } diff --git a/modules/flash/main.nf b/modules/flash/main.nf index 8b8d99e4..f9a381e9 100644 --- a/modules/flash/main.nf +++ b/modules/flash/main.nf @@ -21,23 +21,23 @@ process FLASH { tuple val(meta), path(reads) output: - tuple val(meta), path("*.merged.*.fastq.gz"), emit: reads - path "versions.yml" , emit: version + tuple val(meta), path("*.fastq.gz"), emit: reads + path "versions.yml" , emit: version script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" - def merged = "-o ${prefix}.merged" - def input_reads = "${reads[0]} ${reads[1]}" """ flash \\ $options.args \\ - $merged \\ + -o ${prefix} \\ -z \\ - $input_reads + ${reads[0]} \\ + ${reads[1]} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(flash --version) + ${getSoftwareName(task.process)}: \$(echo \$(flash --version 2>&1) | sed 's/^.*FLASH v//; s/ .*\$//') END_VERSIONS """ } diff --git a/tests/modules/cat/fastq/main.nf b/tests/modules/cat/fastq/main.nf index de11bcf2..027bd108 100644 --- a/tests/modules/cat/fastq/main.nf +++ b/tests/modules/cat/fastq/main.nf @@ -2,24 +2,26 @@ nextflow.enable.dsl = 2 -include { CAT_FASTQ } from '../../../../modules/cat/fastq/main.nf' addParams( options: [:] ) +include { CAT_FASTQ } from '../../../../modules/cat/fastq/main.nf' addParams( options: [publish_dir:'cat'] ) workflow test_cat_fastq_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test2_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_1_fastq_gz'], checkIfExists: true) ] + ] CAT_FASTQ ( input ) } workflow test_cat_fastq_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test2_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test2_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_2_fastq_gz'], checkIfExists: true) ] + ] CAT_FASTQ ( input ) } diff --git a/tests/modules/cat/fastq/test.yml b/tests/modules/cat/fastq/test.yml index 3a23d309..9a5af25c 100644 --- a/tests/modules/cat/fastq/test.yml +++ b/tests/modules/cat/fastq/test.yml @@ -4,7 +4,7 @@ - cat - cat/fastq files: - - path: ./output/merged_fastq/test.merged.fastq.gz + - path: ./output/cat/test.merged.fastq.gz md5sum: 59f6dbe193741bb40f498f254aeb2e99 - name: cat fastq fastqc_paired_end @@ -13,7 +13,7 @@ - cat - cat/fastq files: - - path: ./output/merged_fastq/test_2.merged.fastq.gz + - path: ./output/cat/test_2.merged.fastq.gz md5sum: d2b1a836eef1058738ecab36c907c5ba - - path: ./output/merged_fastq/test_1.merged.fastq.gz + - path: ./output/cat/test_1.merged.fastq.gz md5sum: 59f6dbe193741bb40f498f254aeb2e99 diff --git a/tests/modules/flash/main.nf b/tests/modules/flash/main.nf index e0f5e623..2128650d 100644 --- a/tests/modules/flash/main.nf +++ b/tests/modules/flash/main.nf @@ -5,10 +5,11 @@ nextflow.enable.dsl = 2 include { FLASH } from '../../../modules/flash/main.nf' addParams( options: [args:'-m 20 -M 100'] ) workflow test_flash { - input = [ [ id:'test', single_end:false ], // meta map - [ 
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] FLASH ( input ) } diff --git a/tests/modules/flash/test.yml b/tests/modules/flash/test.yml index 61ea9eab..31cdaeff 100644 --- a/tests/modules/flash/test.yml +++ b/tests/modules/flash/test.yml @@ -3,9 +3,6 @@ tags: - flash files: - - path: output/flash/test.merged.notCombined_2.fastq.gz - md5sum: 96ec044281fe60e0061976d928810314 - - path: output/flash/test.merged.extendedFrags.fastq.gz - md5sum: da20afa705e8ea881e66960bb75607c9 - - path: output/flash/test.merged.notCombined_1.fastq.gz - md5sum: 32451c87f89172c764bec19136592d29 + - path: output/flash/test.notCombined_2.fastq.gz + - path: output/flash/test.extendedFrags.fastq.gz + - path: output/flash/test.notCombined_1.fastq.gz From b2c2d4deb456d92e21777985bb2eda59002748cc Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Wed, 29 Sep 2021 14:27:00 +0100 Subject: [PATCH 105/314] Add custom/dumpsoftwareversions modules for nf-core pipeline template (#761) * Add custom/dumpsoftwareversions modules for nf-core pipeline template * Remove md5sums due to differing NF versions --- .../custom/dumpsoftwareversions/functions.nf | 78 +++++++++++++ modules/custom/dumpsoftwareversions/main.nf | 105 ++++++++++++++++++ modules/custom/dumpsoftwareversions/meta.yml | 33 ++++++ tests/config/pytest_modules.yml | 12 +- .../custom/dumpsoftwareversions/main.nf | 24 ++++ .../custom/dumpsoftwareversions/test.yml | 8 ++ 6 files changed, 256 insertions(+), 4 deletions(-) create mode 100644 modules/custom/dumpsoftwareversions/functions.nf create mode 100644 modules/custom/dumpsoftwareversions/main.nf create mode 100644 modules/custom/dumpsoftwareversions/meta.yml create mode 100644 tests/modules/custom/dumpsoftwareversions/main.nf create mode 100644 tests/modules/custom/dumpsoftwareversions/test.yml diff --git a/modules/custom/dumpsoftwareversions/functions.nf b/modules/custom/dumpsoftwareversions/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/custom/dumpsoftwareversions/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/custom/dumpsoftwareversions/main.nf b/modules/custom/dumpsoftwareversions/main.nf new file mode 100644 index 00000000..79e60cb2 --- /dev/null +++ b/modules/custom/dumpsoftwareversions/main.nf @@ -0,0 +1,105 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CUSTOM_DUMPSOFTWAREVERSIONS { + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } + + // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container + conda (params.enable_conda ? "bioconda::multiqc=1.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" + } + + input: + path versions + + output: + path 'software_versions.yml' , emit: yml + path 'software_versions_mqc.yml', emit: mqc_yaml + path 'versions.yml' , emit: versions + + script: + """ + #!/usr/bin/env python + + import yaml + import platform + from textwrap import dedent + + def _make_versions_html(versions): + html = [ + dedent( + '''\\ + + + + + + + + + + ''' + ) + ] + for process, tmp_versions in sorted(versions.items()): + html.append("") + for i, (tool, version) in enumerate(sorted(tmp_versions.items())): + html.append( + dedent( + f'''\\ + + + + + + ''' + ) + ) + html.append("") + html.append("
Process Name Software Version
{process if (i == 0) else ''}{tool}{version}
") + return "\\n".join(html) + + with open("$versions") as f: + versions = yaml.safe_load(f) + + versions["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version" + } + + versions_mqc = { + 'id': 'software_versions', + 'section_name': '${workflow.manifest.name} Software Versions', + 'section_href': 'https://github.com/${workflow.manifest.name}', + 'plot_type': 'html', + 'description': 'are collected at run time from the software output.', + 'data': _make_versions_html(versions) + } + + with open("software_versions.yml", 'w') as f: + yaml.dump(versions, f, default_flow_style=False) + with open("software_versions_mqc.yml", 'w') as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + + yaml_version = {} + yaml_version["${getProcessName(task.process)}"] = { + 'python': platform.python_version(), + 'yaml': yaml.__version__ + } + with open('versions.yml', 'w') as f: + yaml.dump(yaml_version, f, default_flow_style=False) + """ +} diff --git a/modules/custom/dumpsoftwareversions/meta.yml b/modules/custom/dumpsoftwareversions/meta.yml new file mode 100644 index 00000000..1cf61615 --- /dev/null +++ b/modules/custom/dumpsoftwareversions/meta.yml @@ -0,0 +1,33 @@ +name: custom_dumpsoftwareversions +description: Custom module used to dump software versions within the nf-core pipeline template +keywords: + - custom + - version +tools: + - custom: + description: Custom module used to dump software versions within the nf-core pipeline template + homepage: https://github.com/nf-core/tools + documentation: https://github.com/nf-core/tools + +input: + - versions: + type: file + description: YML file containing software versions + pattern: "*.yml" + +output: + - yml: + type: file + description: Standard YML file containing software versions + pattern: "software_versions.yml" + - mqc_yml: + type: file + description: MultiQC custom content YML file containing software versions + pattern: "software_versions_mqc.yml" + - version: + type: file + description: File containing software version + pattern: "versions.yml" + +authors: + - "@drpatelh" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 16d4790d..ebe91db0 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -258,6 +258,10 @@ cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** +custom/dumpsoftwareversions: + - modules/custom/dumpsoftwareversions/** + - tests/modules/custom/dumpsoftwareversions/** + cutadapt: - modules/cutadapt/** - tests/modules/cutadapt/** @@ -286,14 +290,14 @@ delly/call: - modules/delly/call/** - tests/modules/delly/call/** -diamond/blastx: - - modules/diamond/blastx/** - - tests/modules/diamond/blastx/** - diamond/blastp: - modules/diamond/blastp/** - tests/modules/diamond/blastp/** +diamond/blastx: + - modules/diamond/blastx/** + - tests/modules/diamond/blastx/** + diamond/makedb: - modules/diamond/makedb/** - tests/modules/diamond/makedb/** diff --git a/tests/modules/custom/dumpsoftwareversions/main.nf b/tests/modules/custom/dumpsoftwareversions/main.nf new file mode 100644 index 00000000..94dbc5fb --- /dev/null +++ b/tests/modules/custom/dumpsoftwareversions/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FASTQC } from '../../../../modules/fastqc/main.nf' addParams( options: [:] ) +include { MULTIQC } from '../../../../modules/multiqc/main.nf' addParams( options: [:] ) +include { CUSTOM_DUMPSOFTWAREVERSIONS } from 
'../../../../modules/custom/dumpsoftwareversions/main.nf' addParams( options: [publish_dir:'custom'] ) + +workflow test_custom_dumpsoftwareversions { + input = [ + [ id: 'test', single_end: false ], + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + + FASTQC ( input ) + MULTIQC ( FASTQC.out.zip.collect { it[1] } ) + + ch_software_versions = Channel.empty() + ch_software_versions = ch_software_versions.mix(FASTQC.out.version) + ch_software_versions = ch_software_versions.mix(MULTIQC.out.version) + + CUSTOM_DUMPSOFTWAREVERSIONS ( ch_software_versions.collectFile() ) +} diff --git a/tests/modules/custom/dumpsoftwareversions/test.yml b/tests/modules/custom/dumpsoftwareversions/test.yml new file mode 100644 index 00000000..1815c0ba --- /dev/null +++ b/tests/modules/custom/dumpsoftwareversions/test.yml @@ -0,0 +1,8 @@ +- name: custom dumpsoftwareversions + command: nextflow run ./tests/modules/custom/dumpsoftwareversions -entry test_custom_dumpsoftwareversions -c tests/config/nextflow.config + tags: + - custom + - custom/dumpsoftwareversions + files: + - path: output/custom/software_versions.yml + - path: output/custom/software_versions_mqc.yml From 5a757b2981b634b94015da5969931b96a9f6b8da Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Wed, 29 Sep 2021 14:35:43 +0100 Subject: [PATCH 106/314] Fix tyop in custom/dumpsoftwareversions (#762) * Add custom/dumpsoftwareversions modules for nf-core pipeline template * Remove md5sums due to differing NF versions * Fix tyop in custom/dumpsoftwareversions --- modules/custom/dumpsoftwareversions/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/custom/dumpsoftwareversions/main.nf b/modules/custom/dumpsoftwareversions/main.nf index 79e60cb2..94e112f0 100644 --- a/modules/custom/dumpsoftwareversions/main.nf +++ b/modules/custom/dumpsoftwareversions/main.nf @@ -23,7 +23,7 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { output: path 'software_versions.yml' , emit: yml - path 'software_versions_mqc.yml', emit: mqc_yaml + path 'software_versions_mqc.yml', emit: mqc_yml path 'versions.yml' , emit: versions script: From 22ec5c6007159d441585ef54bfa6272b6f93c78a Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Thu, 30 Sep 2021 09:00:33 +0100 Subject: [PATCH 107/314] Dump version for /custom/dumpsoftwareversions module itself (#764) --- modules/custom/dumpsoftwareversions/main.nf | 23 +++++++++++---------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/modules/custom/dumpsoftwareversions/main.nf b/modules/custom/dumpsoftwareversions/main.nf index 94e112f0..8424ab07 100644 --- a/modules/custom/dumpsoftwareversions/main.nf +++ b/modules/custom/dumpsoftwareversions/main.nf @@ -72,10 +72,16 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { html.append("") return "\\n".join(html) - with open("$versions") as f: - versions = yaml.safe_load(f) + module_versions = {} + module_versions["${getProcessName(task.process)}"] = { + 'python': platform.python_version(), + 'yaml': yaml.__version__ + } - versions["Workflow"] = { + with open("$versions") as f: + workflow_versions = yaml.safe_load(f) | module_versions + + workflow_versions["Workflow"] = { "Nextflow": "$workflow.nextflow.version", "$workflow.manifest.name": "$workflow.manifest.version" } @@ -86,20 +92,15 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { 'section_href': 'https://github.com/${workflow.manifest.name}', 'plot_type': 'html', 'description': 'are 
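Beyond the module test above, the intended pipeline-level wiring follows the same pattern: every module's versions.yml output is mixed into a single channel, the files are concatenated with collectFile(), and the result is handed to CUSTOM_DUMPSOFTWAREVERSIONS. A hedged sketch is below; the include paths and the input reads are placeholders, and FASTQC/MULTIQC simply stand in for any modules that emit a version file.

nextflow.enable.dsl = 2

include { FASTQC                      } from './modules/fastqc/main.nf'                      addParams( options: [:] )
include { MULTIQC                     } from './modules/multiqc/main.nf'                     addParams( options: [:] )
include { CUSTOM_DUMPSOFTWAREVERSIONS } from './modules/custom/dumpsoftwareversions/main.nf' addParams( options: [:] )

workflow {
    // Placeholder paired-end input; replace with real FastQ files.
    input = [
        [ id:'sample1', single_end:false ],
        [ file('sample1_R1.fastq.gz'), file('sample1_R2.fastq.gz') ]
    ]

    FASTQC ( input )
    MULTIQC ( FASTQC.out.zip.collect { it[1] } )

    // Mix every module's versions.yml into one channel, concatenate the files,
    // then hand the combined file to the dump module.
    ch_versions = Channel.empty()
    ch_versions = ch_versions.mix(FASTQC.out.version)
    ch_versions = ch_versions.mix(MULTIQC.out.version)

    CUSTOM_DUMPSOFTWAREVERSIONS ( ch_versions.collectFile() )
}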
collected at run time from the software output.', - 'data': _make_versions_html(versions) + 'data': _make_versions_html(workflow_versions) } with open("software_versions.yml", 'w') as f: - yaml.dump(versions, f, default_flow_style=False) + yaml.dump(workflow_versions, f, default_flow_style=False) with open("software_versions_mqc.yml", 'w') as f: yaml.dump(versions_mqc, f, default_flow_style=False) - yaml_version = {} - yaml_version["${getProcessName(task.process)}"] = { - 'python': platform.python_version(), - 'yaml': yaml.__version__ - } with open('versions.yml', 'w') as f: - yaml.dump(yaml_version, f, default_flow_style=False) + yaml.dump(module_versions, f, default_flow_style=False) """ } From 216dc8c984bfc65a5865f9c7b2e0c1bf56c9a973 Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Thu, 30 Sep 2021 10:26:52 +0200 Subject: [PATCH 108/314] remove cpu restrictions (#760) Co-authored-by: Harshil Patel --- modules/bowtie/align/main.nf | 5 ++--- modules/bowtie2/align/main.nf | 9 ++++----- modules/bwa/mem/main.nf | 5 ++--- modules/bwamem2/mem/main.nf | 5 ++--- modules/bwameth/align/main.nf | 5 ++--- 5 files changed, 12 insertions(+), 17 deletions(-) diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 060c5fc4..7c71cb82 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -29,7 +29,6 @@ process BOWTIE_ALIGN { tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def split_cpus = Math.floor(task.cpus/2) def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def unaligned = params.save_unaligned ? "--un ${prefix}.unmapped.fastq" : '' @@ -37,7 +36,7 @@ process BOWTIE_ALIGN { """ INDEX=`find -L ./ -name "*.3.ebwt" | sed 's/.3.ebwt//'` bowtie \\ - --threads ${split_cpus} \\ + --threads $task.cpus \\ --sam \\ -x \$INDEX \\ -q \\ @@ -45,7 +44,7 @@ process BOWTIE_ALIGN { $options.args \\ $endedness \\ 2> ${prefix}.out \\ - | samtools view $options.args2 -@ ${split_cpus} -bS -o ${prefix}.bam - + | samtools view $options.args2 -@ $task.cpus -bS -o ${prefix}.bam - if [ -f ${prefix}.unmapped.fastq ]; then gzip ${prefix}.unmapped.fastq diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 00bcf83c..4a972373 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -29,7 +29,6 @@ process BOWTIE2_ALIGN { tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def split_cpus = Math.floor(task.cpus/2) def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { @@ -39,11 +38,11 @@ process BOWTIE2_ALIGN { bowtie2 \\ -x \$INDEX \\ -U $reads \\ - --threads $split_cpus \\ + --threads $task.cpus \\ $unaligned \\ $options.args \\ 2> ${prefix}.bowtie2.log \\ - | samtools view -@ ${split_cpus} $options.args2 -bhS -o ${prefix}.bam - + | samtools view -@ $task.cpus $options.args2 -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: @@ -60,11 +59,11 @@ process BOWTIE2_ALIGN { -x \$INDEX \\ -1 ${reads[0]} \\ -2 ${reads[1]} \\ - --threads $split_cpus \\ + --threads $task.cpus \\ $unaligned \\ $options.args \\ 2> ${prefix}.bowtie2.log \\ - | samtools view -@ ${split_cpus} $options.args2 -bhS -o ${prefix}.bam - + | samtools view -@ $task.cpus $options.args2 -bhS -o ${prefix}.bam - if [ -f ${prefix}.unmapped.fastq.1.gz ]; then mv ${prefix}.unmapped.fastq.1.gz ${prefix}.unmapped_1.fastq.gz diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index a081a69a..f20e0c39 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -27,7 +27,6 @@ process BWA_MEM { path "versions.yml" , emit: version script: - def split_cpus = Math.floor(task.cpus/2) def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" @@ -37,10 +36,10 @@ process BWA_MEM { bwa mem \\ $options.args \\ $read_group \\ - -t $split_cpus \\ + -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ ${split_cpus} -bhS -o ${prefix}.bam - + | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index d21b8b99..ea584a39 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -27,7 +27,6 @@ process BWAMEM2_MEM { path "versions.yml" , emit: version script: - def split_cpus = Math.floor(task.cpus/2) def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" @@ -38,10 +37,10 @@ process BWAMEM2_MEM { mem \\ $options.args \\ $read_group \\ - -t $split_cpus \\ + -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ ${split_cpus} -bhS -o ${prefix}.bam - + | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index 814faa2b..d78055fc 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -27,7 +27,6 @@ process BWAMETH_ALIGN { path "versions.yml" , emit: version script: - def split_cpus = Math.floor(task.cpus/2) def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" @@ -37,10 +36,10 @@ process BWAMETH_ALIGN { bwameth.py \\ $options.args \\ $read_group \\ - -t ${split_cpus} \\ + -t $task.cpus \\ --reference \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ ${split_cpus} -bhS -o ${prefix}.bam - + | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: From 01cc326c2326d17615857953d81c586e2a3e5c60 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Thu, 30 Sep 2021 12:54:16 +0200 Subject: [PATCH 109/314] add Amps (#768) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * Start work, continue once on non-mobile internet * finished and working on conda * Update modules/amps/main.nf Co-authored-by: Jose Espinosa-Carrasco * Apply suggestions from code review Co-authored-by: Jose Espinosa-Carrasco Co-authored-by: Jose Espinosa-Carrasco --- modules/amps/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/amps/main.nf | 47 ++++++++++++++++++++ modules/amps/meta.yml | 66 ++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/amps/main.nf | 32 ++++++++++++++ tests/modules/amps/test.yml | 11 +++++ 6 files changed, 238 insertions(+) create mode 100644 modules/amps/functions.nf create mode 100644 modules/amps/main.nf create mode 100644 modules/amps/meta.yml create mode 100644 tests/modules/amps/main.nf create mode 100644 tests/modules/amps/test.yml diff --git a/modules/amps/functions.nf b/modules/amps/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/amps/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/amps/main.nf b/modules/amps/main.nf new file mode 100644 index 00000000..676435f1 --- /dev/null +++ b/modules/amps/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process AMPS { + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "bioconda::hops=0.35" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1" + } else { + container "quay.io/biocontainers/hops:0.35--hdfd78af_1" + } + + input: + path maltextract_results + path taxon_list + val filter + + output: + path "results/heatmap_overview_Wevid.json" , emit: json + path "results/heatmap_overview_Wevid.pdf" , emit: summary_pdf + path "results/heatmap_overview_Wevid.tsv" , emit: tsv + path "results/pdf_candidate_profiles/" , emit: candidate_pdfs + path "versions.yml" , emit: version + + script: + """ + postprocessing.AMPS.r \\ + -r $maltextract_results \\ + -n $taxon_list \\ + -m $filter \\ + -t $task.cpus \\ + -j \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + amps: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') + END_VERSIONS + """ +} diff --git a/modules/amps/meta.yml b/modules/amps/meta.yml new file mode 100644 index 00000000..62844f6a --- /dev/null +++ b/modules/amps/meta.yml @@ -0,0 +1,66 @@ +name: amps +description: Post-processing script of the MaltExtract component of the HOPS package +keywords: + - malt + - MaltExtract + - HOPS + - amps + - alignment + - metagenomics + - ancient DNA + - aDNA + - palaeogenomics + - archaeogenomics + - microbiome + - authentication + - damage + - edit distance + - post Post-processing + - visualisation +tools: + - amps: + description: Post-processing script of the MaltExtract tool for ancient metagenomics + homepage: "https://github.com/rhuebler/HOPS" + documentation: "https://github.com/keyfm/amps" + tool_dev_url: "https://github.com/keyfm/amps" + doi: "10.1186/s13059-019-1903-0" + licence: ['GPL >=3'] + +input: + - maltextract_results: + type: directory + description: MaltExtract output directory + pattern: "results/" + - taxon_list: + type: file + description: List of target taxa to evaluate used in MaltExtract + pattern: "*.txt" + - filter: + type: string + description: The filter mode used in MaltExtract + pattern: "def_anc|default|scan|ancient|crawl" + +output: + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - json: + type: file + description: Candidate summary heatmap in MultiQC compatible JSON format + pattern: "heatmap_overview_Wevid.json" + - summary_pdf: + type: file + description: Candidate summary heatmap in PDF format + pattern: 
"heatmap_overview_Wevid.pdf" + - tsv: + type: file + description: Candidate summary heatmap in TSV format + pattern: "heatmap_overview_Wevid.tsv" + - candidate_pdfs: + type: directory + description: Directory of per sample output PDFs organised by reference + pattern: "pdf_candidate_profiles/" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index ebe91db0..3dda1d94 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -14,6 +14,10 @@ allelecounter: - modules/allelecounter/** - tests/modules/allelecounter/** +amps: + - modules/amps/** + - tests/modules/amps/** + arriba: - modules/arriba/** - tests/modules/arriba/** diff --git a/tests/modules/amps/main.nf b/tests/modules/amps/main.nf new file mode 100644 index 00000000..7d7a40d1 --- /dev/null +++ b/tests/modules/amps/main.nf @@ -0,0 +1,32 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) +include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' addParams( options: [:] ) +include { MALT_RUN } from '../../../modules/malt/run/main.nf' addParams( options: [:] ) +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' addParams( options: [args: "-f def_anc"] ) +include { AMPS } from '../../../modules/amps/main.nf' addParams( options: [:] ) + + +workflow test_amps { + + fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + gff = [] + seq_type = "DNA" + map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) + input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + mode = "BlastN" + taxon_list = file(params.test_data['sarscov2']['genome']['taxon_list_txt'], checkIfExists: true) + ncbi_dir = file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) + filter = "def_anc" + + UNZIP_MALT ( map_db ) + UNZIP_MALTEXTRACT ( ncbi_dir ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP_MALT.out.unzipped_archive ) + MALT_RUN ( input, mode, MALT_BUILD.out.index ) + MALTEXTRACT ( MALT_RUN.out.rma6, taxon_list, UNZIP_MALTEXTRACT.out.unzipped_archive) + + AMPS ( MALTEXTRACT.out.results, taxon_list, filter ) +} diff --git a/tests/modules/amps/test.yml b/tests/modules/amps/test.yml new file mode 100644 index 00000000..04691f18 --- /dev/null +++ b/tests/modules/amps/test.yml @@ -0,0 +1,11 @@ +- name: amps + command: nextflow run ./tests/modules/amps -entry test_amps -c tests/config/nextflow.config + tags: + - amps + files: + - path: output/amps/results/heatmap_overview_Wevid.json + md5sum: 82f484d02a9e3d0cc3d5bcdcc2965e44 + - path: output/amps/results/heatmap_overview_Wevid.pdf + - path: output/amps/results/heatmap_overview_Wevid.tsv + md5sum: 1a7d565a37ef4d6054f7ade63fbadc2f + - path: output/amps/results/pdf_candidate_profiles/Severe_acute_respiratory_syndrome_coronavirus_2/stp1_test_1.rma6_Severe_acute_respiratory_syndrome_coronavirus_2_summary.pdf From 5b1cea7f7f38b78a8fd8e2b90b1b877bd117ed96 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Thu, 30 Sep 2021 14:37:35 +0200 Subject: [PATCH 110/314] Add bbmap/bbsplit module (#771) * Add bbmap/bbsplit module * Conda complains about md5sum * Apply suggestions from code review Co-authored-by: Harshil Patel --- 
modules/bbmap/bbsplit/functions.nf | 78 ++++++++++++++++++++++ modules/bbmap/bbsplit/main.nf | 96 ++++++++++++++++++++++++++++ modules/bbmap/bbsplit/meta.yml | 75 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bbmap/bbsplit/main.nf | 22 +++++++ tests/modules/bbmap/bbsplit/test.yml | 24 +++++++ 6 files changed, 299 insertions(+) create mode 100644 modules/bbmap/bbsplit/functions.nf create mode 100644 modules/bbmap/bbsplit/main.nf create mode 100644 modules/bbmap/bbsplit/meta.yml create mode 100644 tests/modules/bbmap/bbsplit/main.nf create mode 100644 tests/modules/bbmap/bbsplit/test.yml diff --git a/modules/bbmap/bbsplit/functions.nf b/modules/bbmap/bbsplit/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/bbmap/bbsplit/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/bbmap/bbsplit/main.nf b/modules/bbmap/bbsplit/main.nf new file mode 100644 index 00000000..614a4c02 --- /dev/null +++ b/modules/bbmap/bbsplit/main.nf @@ -0,0 +1,96 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BBMAP_BBSPLIT { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bbmap=38.93" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bbmap:38.93--he522d1c_0" + } else { + container "quay.io/biocontainers/bbmap:38.93--he522d1c_0" + } + + input: + tuple val(meta), path(reads) + path index + path primary_ref + tuple val(other_ref_names), path (other_ref_paths) + val only_build_index + + output: + path "bbsplit" , optional:true, emit: index + tuple val(meta), path('*primary*fastq.gz'), optional:true, emit: primary_fastq + tuple val(meta), path('*fastq.gz') , optional:true, emit: all_fastq + tuple val(meta), path('*txt') , optional:true, emit: stats + path "versions.yml" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + def avail_mem = 3 + if (!task.memory) { + log.info '[BBSplit] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + + def other_refs = [] + other_ref_names.eachWithIndex { name, index -> + other_refs << "ref_${name}=${other_ref_paths[index]}" + } + if (only_build_index) { + if (primary_ref && other_ref_names && other_ref_paths) { + """ + bbsplit.sh \\ + -Xmx${avail_mem}g \\ + ref_primary=$primary_ref \\ + ${other_refs.join(' ')} \\ + path=bbsplit \\ + threads=$task.cpus \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bbversion.sh 2>&1) + END_VERSIONS + """ + } else { + log.error 'ERROR: Please specify as input a primary fasta file along with names and paths to non-primary fasta files.' + } + } else { + def index_files = '' + if (index) { + index_files = "path=$index" + } else if (primary_ref && other_ref_names && other_ref_paths) { + index_files = "ref_primary=${primary_ref} ${other_refs.join(' ')}" + } else { + log.error 'ERROR: Please either specify a BBSplit index as input or a primary fasta file along with names and paths to non-primary fasta files.' + } + def fastq_in = meta.single_end ? "in=${reads}" : "in=${reads[0]} in2=${reads[1]}" + def fastq_out = meta.single_end ? 
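// Worked example (values are illustrative, not taken from the patch): what the
// eachWithIndex loop in the bbsplit script above turns the `other_ref_names` /
// `other_ref_paths` inputs into before they are joined onto the bbsplit.sh
// command line next to ref_primary=<primary fasta>.
def example_names = ['human', 'mouse']
def example_paths = ['chr22.fa', 'chr19.fa']
def example_refs  = []
example_names.eachWithIndex { name, index ->
    example_refs << "ref_${name}=${example_paths[index]}"
}
assert example_refs.join(' ') == 'ref_human=chr22.fa ref_mouse=chr19.fa'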
"basename=${prefix}_%.fastq.gz" : "basename=${prefix}_%_#.fastq.gz" + """ + bbsplit.sh \\ + -Xmx${avail_mem}g \\ + $index_files \\ + threads=$task.cpus \\ + $fastq_in \\ + $fastq_out \\ + refstats=${prefix}.stats.txt \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(bbversion.sh 2>&1) + END_VERSIONS + """ + } +} diff --git a/modules/bbmap/bbsplit/meta.yml b/modules/bbmap/bbsplit/meta.yml new file mode 100644 index 00000000..2eb3a6c9 --- /dev/null +++ b/modules/bbmap/bbsplit/meta.yml @@ -0,0 +1,75 @@ +name: bbmap_bbsplit +description: write your description here +keywords: + - align + - map + - genome + - reference +tools: + - bbmap: + description: BBMap is a short read aligner, as well as various other bioinformatic tools. + homepage: https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/ + documentation: https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/ + tool_dev_url: None + doi: "" + licence: ['UC-LBL license (see package)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - index: + type: directory + description: Directory to place generated index + pattern: "*" + - primary_ref: + type: path + description: Path to the primary reference + pattern: "*" + - other_ref_names: + type: list + description: List of other reference ids apart from the primary + - other_ref_paths: + type: list + description: Path to other references paths corresponding to "other_ref_names" + - only_build_index: + type: string + description: true = only build index; false = mapping + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - index: + type: directory + description: Directory with index files + pattern: "bbsplit" + - primary_fastq: + type: file + description: Output reads that map to the primary reference + pattern: "*primary*fastq.gz" + - all_fastq: + type: file + description: All reads mapping to any of the references + pattern: "*fastq.gz" + - stats: + type: file + description: Tab-delimited text file containing mapping statistics + pattern: "*.txt" + +authors: + - "@joseespinosa" + - "@drpatelh" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 3dda1d94..63152fe0 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -46,6 +46,10 @@ bbmap/bbduk: - modules/bbmap/bbduk/** - tests/modules/bbmap/bbduk/** +bbmap/bbsplit: + - modules/bbmap/bbsplit/** + - tests/modules/bbmap/bbsplit/** + bbmap/index: - modules/bbmap/index/** - tests/modules/bbmap/index/** diff --git a/tests/modules/bbmap/bbsplit/main.nf b/tests/modules/bbmap/bbsplit/main.nf new file mode 100644 index 00000000..1d3c30c1 --- /dev/null +++ b/tests/modules/bbmap/bbsplit/main.nf @@ -0,0 +1,22 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_INDEX } from '../../../../modules/bbmap/bbsplit/main.nf' addParams( options: [:] ) +include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_SPLIT } from '../../../../modules/bbmap/bbsplit/main.nf' addParams( options: [:] ) + +workflow test_bbmap_bbsplit { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + bbsplit_fasta_list = [ + ['human'], + file('https://raw.githubusercontent.com/nf-core/test-datasets/rnaseq/reference/chr22_23800000-23980000.fa', checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BBMAP_BBSPLIT_INDEX ( [ [:], [] ], [], fasta, bbsplit_fasta_list, true ) + BBMAP_BBSPLIT_SPLIT ( input, BBMAP_BBSPLIT_INDEX.out.index, fasta, bbsplit_fasta_list, true ) +} diff --git a/tests/modules/bbmap/bbsplit/test.yml b/tests/modules/bbmap/bbsplit/test.yml new file mode 100644 index 00000000..87bdebea --- /dev/null +++ b/tests/modules/bbmap/bbsplit/test.yml @@ -0,0 +1,24 @@ +- name: bbmap bbsplit test_bbmap_bbsplit + command: nextflow run tests/modules/bbmap/bbsplit -entry test_bbmap_bbsplit -c tests/config/nextflow.config + tags: + - bbmap/bbsplit + - bbmap + files: + - path: output/bbmap/bbsplit/ref/genome/1/chr1.chrom.gz + - path: output/bbmap/bbsplit/ref/genome/1/info.txt + contains: + - 'Chromosome' + - path: output/bbmap/bbsplit/ref/genome/1/merged_ref_9222711925172838098.fa.gz + - path: output/bbmap/bbsplit/ref/genome/1/namelist.txt + md5sum: 45e7a4cdc7a11a39ada56844ca3a1e30 + - path: output/bbmap/bbsplit/ref/genome/1/reflist.txt + contains: + - 'genome.fasta' + - path: output/bbmap/bbsplit/ref/genome/1/scaffolds.txt.gz + - path: output/bbmap/bbsplit/ref/genome/1/summary.txt + contains: + - 'scaffolds' + - path: output/bbmap/bbsplit/ref/index/1/chr1_index_k13_c13_b1.block + md5sum: 385913c1e84b77dc7bf36288ee1c8706 + - path: output/bbmap/bbsplit/ref/index/1/chr1_index_k13_c13_b1.block2.gz + md5sum: 9de572b603abe5b6540056db8dee05a5 From de1453396489895eecbdfd72d101a85743f8bea1 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 1 Oct 2021 13:29:02 +0100 Subject: [PATCH 111/314] Fix version command for 
qualimap/rnaseq (#779) --- modules/qualimap/rnaseq/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/qualimap/rnaseq/main.nf b/modules/qualimap/rnaseq/main.nf index d1ed1021..fa32a6a7 100644 --- a/modules/qualimap/rnaseq/main.nf +++ b/modules/qualimap/rnaseq/main.nf @@ -54,7 +54,7 @@ process QUALIMAP_RNASEQ { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(qualimap 2>&1 | sed 's/^.*QualiMap v.//; s/Built.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ } From 7b3315591a149609e27914965f858c9a7e071564 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 1 Oct 2021 14:04:56 +0100 Subject: [PATCH 112/314] Remove def software lines and emit versions channel as plural (#780) * Remove def software line * Replace version with versions in emit statement * Fix default software names --- modules/abacas/main.nf | 3 +-- modules/adapterremoval/main.nf | 3 +-- modules/agrvate/main.nf | 3 +-- modules/allelecounter/main.nf | 3 +-- modules/amps/main.nf | 4 ++-- modules/arriba/main.nf | 3 +-- modules/artic/guppyplex/main.nf | 3 +-- modules/artic/minion/main.nf | 3 +-- modules/bamaligncleaner/main.nf | 3 +-- modules/bandage/image/main.nf | 3 +-- modules/bbmap/align/main.nf | 3 +-- modules/bbmap/bbduk/main.nf | 3 +-- modules/bbmap/bbsplit/main.nf | 3 +-- modules/bbmap/index/main.nf | 3 +-- modules/bcftools/concat/main.nf | 3 +-- modules/bcftools/consensus/main.nf | 3 +-- modules/bcftools/filter/main.nf | 3 +-- modules/bcftools/isec/main.nf | 3 +-- modules/bcftools/merge/main.nf | 3 +-- modules/bcftools/mpileup/main.nf | 3 +-- modules/bcftools/norm/main.nf | 3 +-- modules/bcftools/query/main.nf | 3 +-- modules/bcftools/reheader/main.nf | 3 +-- modules/bcftools/stats/main.nf | 3 +-- modules/bcftools/view/main.nf | 3 +-- modules/bedtools/bamtobed/main.nf | 3 +-- modules/bedtools/complement/main.nf | 3 +-- modules/bedtools/genomecov/main.nf | 3 +-- modules/bedtools/getfasta/main.nf | 3 +-- modules/bedtools/intersect/main.nf | 3 +-- modules/bedtools/makewindows/main.nf | 3 +-- modules/bedtools/maskfasta/main.nf | 3 +-- modules/bedtools/merge/main.nf | 3 +-- modules/bedtools/slop/main.nf | 3 +-- modules/bedtools/sort/main.nf | 3 +-- modules/bedtools/subtract/main.nf | 3 +-- modules/bismark/align/main.nf | 3 +-- modules/bismark/deduplicate/main.nf | 3 +-- modules/bismark/genomepreparation/main.nf | 3 +-- modules/bismark/methylationextractor/main.nf | 3 +-- modules/bismark/report/main.nf | 3 +-- modules/bismark/summary/main.nf | 3 +-- modules/blast/blastn/main.nf | 3 +-- modules/blast/makeblastdb/main.nf | 3 +-- modules/bowtie/align/main.nf | 3 +-- modules/bowtie/build/main.nf | 3 +-- modules/bowtie2/align/main.nf | 3 +-- modules/bowtie2/build/main.nf | 3 +-- modules/bwa/aln/main.nf | 3 +-- modules/bwa/index/main.nf | 3 +-- modules/bwa/mem/main.nf | 3 +-- modules/bwa/sampe/main.nf | 3 +-- modules/bwa/samse/main.nf | 3 +-- modules/bwamem2/index/main.nf | 3 +-- modules/bwamem2/mem/main.nf | 3 +-- modules/bwameth/align/main.nf | 3 +-- modules/bwameth/index/main.nf | 3 +-- modules/cat/cat/main.nf | 2 +- modules/cat/fastq/main.nf | 2 +- modules/chromap/chromap/main.nf | 3 +-- modules/chromap/index/main.nf | 3 +-- modules/cnvkit/main.nf | 3 +-- modules/cooler/digest/main.nf | 3 +-- modules/cooler/dump/main.nf | 3 +-- modules/custom/dumpsoftwareversions/main.nf | 6 +++--- modules/cutadapt/main.nf | 3 +-- modules/damageprofiler/main.nf | 3 +-- 
modules/deeptools/computematrix/main.nf | 3 +-- modules/deeptools/plotfingerprint/main.nf | 3 +-- modules/deeptools/plotheatmap/main.nf | 3 +-- modules/deeptools/plotprofile/main.nf | 3 +-- modules/delly/call/main.nf | 3 +-- modules/diamond/blastp/main.nf | 3 +-- modules/diamond/blastx/main.nf | 3 +-- modules/diamond/makedb/main.nf | 3 +-- modules/dragonflye/main.nf | 3 +-- modules/dshbio/exportsegments/main.nf | 3 +-- modules/dshbio/filterbed/main.nf | 3 +-- modules/dshbio/filtergff3/main.nf | 3 +-- modules/dshbio/splitbed/main.nf | 3 +-- modules/dshbio/splitgff3/main.nf | 3 +-- modules/ensemblvep/main.nf | 3 +-- modules/expansionhunter/main.nf | 3 +-- modules/fastani/main.nf | 3 +-- modules/fastp/main.nf | 3 +-- modules/fastqc/main.nf | 8 ++++---- modules/fasttree/main.nf | 3 +-- modules/fgbio/callmolecularconsensusreads/main.nf | 3 +-- modules/fgbio/sortbam/main.nf | 3 +-- modules/flash/main.nf | 3 +-- modules/gatk4/applybqsr/main.nf | 3 +-- modules/gatk4/baserecalibrator/main.nf | 3 +-- modules/gatk4/bedtointervallist/main.nf | 3 +-- modules/gatk4/createsequencedictionary/main.nf | 3 +-- modules/gatk4/fastqtosam/main.nf | 3 +-- modules/gatk4/getpileupsummaries/main.nf | 3 +-- modules/gatk4/haplotypecaller/main.nf | 3 +-- modules/gatk4/intervallisttools/main.nf | 3 +-- modules/gatk4/markduplicates/main.nf | 3 +-- modules/gatk4/mergebamalignment/main.nf | 3 +-- modules/gatk4/mergevcfs/main.nf | 3 +-- modules/gatk4/mutect2/main.nf | 3 +-- modules/gatk4/revertsam/main.nf | 3 +-- modules/gatk4/samtofastq/main.nf | 3 +-- modules/gatk4/splitncigarreads/main.nf | 3 +-- modules/gatk4/variantfiltration/main.nf | 3 +-- modules/genmap/index/main.nf | 3 +-- modules/genmap/mappability/main.nf | 3 +-- modules/gffread/main.nf | 3 +-- modules/glnexus/main.nf | 3 +-- modules/graphmap2/align/main.nf | 3 +-- modules/graphmap2/index/main.nf | 3 +-- modules/gubbins/main.nf | 3 +-- modules/gunzip/main.nf | 3 +-- modules/hifiasm/main.nf | 3 +-- modules/hisat2/align/main.nf | 3 +-- modules/hisat2/build/main.nf | 3 +-- modules/hisat2/extractsplicesites/main.nf | 3 +-- modules/hmmer/hmmalign/main.nf | 3 +-- modules/homer/annotatepeaks/main.nf | 3 +-- modules/homer/findpeaks/main.nf | 3 +-- modules/homer/maketagdirectory/main.nf | 3 +-- modules/homer/makeucscfile/main.nf | 3 +-- modules/iqtree/main.nf | 3 +-- modules/ivar/consensus/main.nf | 3 +-- modules/ivar/trim/main.nf | 3 +-- modules/ivar/variants/main.nf | 3 +-- modules/kallisto/index/main.nf | 3 +-- modules/kallistobustools/count/main.nf | 3 +-- modules/kallistobustools/ref/main.nf | 3 +-- modules/kleborate/main.nf | 3 +-- modules/kraken2/kraken2/main.nf | 3 +-- modules/last/dotplot/main.nf | 3 +-- modules/last/lastal/main.nf | 3 +-- modules/last/lastdb/main.nf | 3 +-- modules/last/mafconvert/main.nf | 3 +-- modules/last/mafswap/main.nf | 3 +-- modules/last/postmask/main.nf | 3 +-- modules/last/split/main.nf | 3 +-- modules/last/train/main.nf | 3 +-- modules/lima/main.nf | 5 ++--- modules/lofreq/call/main.nf | 3 +-- modules/lofreq/callparallel/main.nf | 3 +-- modules/lofreq/filter/main.nf | 3 +-- modules/lofreq/indelqual/main.nf | 3 +-- modules/macs2/callpeak/main.nf | 3 +-- modules/malt/build/main.nf | 3 +-- modules/malt/run/main.nf | 3 +-- modules/maltextract/main.nf | 3 +-- modules/mash/sketch/main.nf | 3 +-- modules/metaphlan3/main.nf | 3 +-- modules/methyldackel/extract/main.nf | 3 +-- modules/methyldackel/mbias/main.nf | 3 +-- modules/minia/main.nf | 3 +-- modules/minimap2/align/main.nf | 3 +-- modules/minimap2/index/main.nf | 3 +-- 
modules/mosdepth/main.nf | 3 +-- modules/msisensor/msi/main.nf | 3 +-- modules/msisensor/scan/main.nf | 7 +++---- modules/multiqc/main.nf | 5 ++--- modules/muscle/main.nf | 3 +-- modules/nanolyse/main.nf | 3 +-- modules/nanoplot/main.nf | 3 +-- modules/nextclade/main.nf | 3 +-- modules/optitype/main.nf | 3 +-- modules/pairix/main.nf | 3 +-- modules/pairtools/dedup/main.nf | 3 +-- modules/pairtools/flip/main.nf | 3 +-- modules/pairtools/parse/main.nf | 3 +-- modules/pairtools/restrict/main.nf | 3 +-- modules/pairtools/select/main.nf | 3 +-- modules/pairtools/sort/main.nf | 3 +-- modules/pangolin/main.nf | 3 +-- modules/pbccs/main.nf | 4 +--- modules/phantompeakqualtools/main.nf | 3 +-- modules/picard/collectmultiplemetrics/main.nf | 3 +-- modules/picard/collectwgsmetrics/main.nf | 3 +-- modules/picard/filtersamreads/main.nf | 3 +-- modules/picard/markduplicates/main.nf | 3 +-- modules/picard/mergesamfiles/main.nf | 3 +-- modules/picard/sortsam/main.nf | 3 +-- modules/plasmidid/main.nf | 3 +-- modules/plink/vcf/main.nf | 5 ++--- modules/preseq/lcextrap/main.nf | 3 +-- modules/prodigal/main.nf | 3 +-- modules/prokka/main.nf | 3 +-- modules/pycoqc/main.nf | 3 +-- modules/pydamage/analyze/main.nf | 3 +-- modules/pydamage/filter/main.nf | 3 +-- modules/qcat/main.nf | 3 +-- modules/qualimap/bamqc/main.nf | 3 +-- modules/qualimap/rnaseq/main.nf | 3 +-- modules/quast/main.nf | 3 +-- modules/rapidnj/main.nf | 3 +-- modules/rasusa/main.nf | 3 +-- modules/raxmlng/main.nf | 3 +-- modules/rsem/calculateexpression/main.nf | 3 +-- modules/rsem/preparereference/main.nf | 3 +-- modules/rseqc/bamstat/main.nf | 3 +-- modules/rseqc/inferexperiment/main.nf | 3 +-- modules/rseqc/innerdistance/main.nf | 3 +-- modules/rseqc/junctionannotation/main.nf | 3 +-- modules/rseqc/junctionsaturation/main.nf | 3 +-- modules/rseqc/readdistribution/main.nf | 3 +-- modules/rseqc/readduplication/main.nf | 3 +-- modules/salmon/index/main.nf | 3 +-- modules/salmon/quant/main.nf | 3 +-- modules/samtools/ampliconclip/main.nf | 3 +-- modules/samtools/faidx/main.nf | 3 +-- modules/samtools/fastq/main.nf | 3 +-- modules/samtools/flagstat/main.nf | 3 +-- modules/samtools/idxstats/main.nf | 3 +-- modules/samtools/index/main.nf | 3 +-- modules/samtools/merge/main.nf | 3 +-- modules/samtools/mpileup/main.nf | 3 +-- modules/samtools/sort/main.nf | 3 +-- modules/samtools/stats/main.nf | 3 +-- modules/samtools/view/main.nf | 3 +-- modules/seacr/callpeak/main.nf | 3 +-- modules/seqkit/split2/main.nf | 3 +-- modules/seqtk/sample/main.nf | 3 +-- modules/seqtk/subseq/main.nf | 3 +-- modules/sequenzautils/bam2seqz/main.nf | 3 +-- modules/sequenzautils/gcwiggle/main.nf | 3 +-- modules/seqwish/induce/main.nf | 3 +-- modules/shovill/main.nf | 3 +-- modules/snpdists/main.nf | 3 +-- modules/snpeff/main.nf | 3 +-- modules/snpsites/main.nf | 3 +-- modules/sortmerna/main.nf | 3 +-- modules/spades/main.nf | 3 +-- modules/staphopiasccmec/main.nf | 3 +-- modules/star/align/main.nf | 3 +-- modules/star/genomegenerate/main.nf | 3 +-- modules/strelka/germline/main.nf | 5 ++--- modules/stringtie/merge/main.nf | 3 +-- modules/stringtie/stringtie/main.nf | 3 +-- modules/subread/featurecounts/main.nf | 3 +-- modules/tabix/bgzip/main.nf | 3 +-- modules/tabix/bgziptabix/main.nf | 3 +-- modules/tabix/tabix/main.nf | 3 +-- modules/tiddit/sv/main.nf | 3 +-- modules/trimgalore/main.nf | 3 +-- modules/ucsc/bed12tobigbed/main.nf | 3 +-- modules/ucsc/bedclip/main.nf | 3 +-- modules/ucsc/bedgraphtobigwig/main.nf | 3 +-- modules/ucsc/bigwigaverageoverbed/main.nf | 3 +-- 
modules/ucsc/wigtobigwig/main.nf | 3 +-- modules/umitools/dedup/main.nf | 3 +-- modules/umitools/extract/main.nf | 3 +-- modules/unicycler/main.nf | 3 +-- modules/untar/main.nf | 3 +-- modules/unzip/main.nf | 3 +-- modules/variantbam/main.nf | 3 +-- modules/vcftools/main.nf | 7 +++---- modules/yara/index/main.nf | 3 +-- modules/yara/mapper/main.nf | 3 +-- 257 files changed, 271 insertions(+), 524 deletions(-) diff --git a/modules/abacas/main.nf b/modules/abacas/main.nf index 307e17d2..bc5440b1 100644 --- a/modules/abacas/main.nf +++ b/modules/abacas/main.nf @@ -24,10 +24,9 @@ process ABACAS { output: tuple val(meta), path('*.abacas*'), emit: results - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ abacas.pl \\ diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index cbf0957a..fad3963f 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -23,10 +23,9 @@ process ADAPTERREMOVAL { output: tuple val(meta), path('*.fastq.gz'), emit: reads tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index 3ca2e0f4..c1a6748e 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -24,10 +24,9 @@ process AGRVATE { output: tuple val(meta), path("${fasta.baseName}-results/${fasta.baseName}-summary.tab"), emit: summary path "${fasta.baseName}-results" , emit: results_dir - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ agrvate \\ diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index 31ef3f79..5184df7d 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -24,10 +24,9 @@ process ALLELECOUNTER { output: tuple val(meta), path("*.alleleCount"), emit: allelecount - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ alleleCounter \\ diff --git a/modules/amps/main.nf b/modules/amps/main.nf index 676435f1..f34423b5 100644 --- a/modules/amps/main.nf +++ b/modules/amps/main.nf @@ -27,7 +27,7 @@ process AMPS { path "results/heatmap_overview_Wevid.pdf" , emit: summary_pdf path "results/heatmap_overview_Wevid.tsv" , emit: tsv path "results/pdf_candidate_profiles/" , emit: candidate_pdfs - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: """ @@ -41,7 +41,7 @@ process AMPS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - amps: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') + ${getSoftwareName(task.process)}: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') END_VERSIONS """ } diff --git a/modules/arriba/main.nf b/modules/arriba/main.nf index b94c22d9..6abae233 100644 --- a/modules/arriba/main.nf +++ b/modules/arriba/main.nf @@ -26,10 +26,9 @@ process ARRIBA { output: tuple val(meta), path("*.fusions.tsv") , emit: fusions tuple val(meta), path("*.fusions.discarded.tsv"), emit: fusions_fail - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def blacklist = (options.args.contains('-b')) ? '' : '-f blacklist' """ diff --git a/modules/artic/guppyplex/main.nf b/modules/artic/guppyplex/main.nf index 5f91e9e3..87bd99c8 100644 --- a/modules/artic/guppyplex/main.nf +++ b/modules/artic/guppyplex/main.nf @@ -23,10 +23,9 @@ process ARTIC_GUPPYPLEX { output: tuple val(meta), path("*.fastq.gz"), emit: fastq - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ artic \\ diff --git a/modules/artic/minion/main.nf b/modules/artic/minion/main.nf index 2f810ecf..68474f19 100644 --- a/modules/artic/minion/main.nf +++ b/modules/artic/minion/main.nf @@ -40,10 +40,9 @@ process ARTIC_MINION { tuple val(meta), path("${prefix}.pass.vcf.gz") , emit: vcf tuple val(meta), path("${prefix}.pass.vcf.gz.tbi") , emit: tbi tuple val(meta), path("*.json"), optional:true , emit: json - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def version = scheme_version.toString().toLowerCase().replaceAll('v','') def fast5 = params.fast5_dir ? "--fast5-directory $fast5_dir" : "" diff --git a/modules/bamaligncleaner/main.nf b/modules/bamaligncleaner/main.nf index 7372f274..720b495a 100644 --- a/modules/bamaligncleaner/main.nf +++ b/modules/bamaligncleaner/main.nf @@ -23,10 +23,9 @@ process BAMALIGNCLEANER { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ diff --git a/modules/bandage/image/main.nf b/modules/bandage/image/main.nf index d15d4826..b7a30a0b 100644 --- a/modules/bandage/image/main.nf +++ b/modules/bandage/image/main.nf @@ -24,10 +24,9 @@ process BANDAGE_IMAGE { output: tuple val(meta), path('*.png'), emit: png tuple val(meta), path('*.svg'), emit: svg - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ Bandage image $gfa ${prefix}.png $options.args diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index 63989be0..733fd4d5 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -24,10 +24,9 @@ process BBMAP_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" input = meta.single_end ? "in=${fastq}" : "in=${fastq[0]} in2=${fastq[1]}" diff --git a/modules/bbmap/bbduk/main.nf b/modules/bbmap/bbduk/main.nf index 4f1540dc..d7243fdb 100644 --- a/modules/bbmap/bbduk/main.nf +++ b/modules/bbmap/bbduk/main.nf @@ -24,10 +24,9 @@ process BBMAP_BBDUK { output: tuple val(meta), path('*.fastq.gz'), emit: reads tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def raw = meta.single_end ? "in=${reads[0]}" : "in1=${reads[0]} in2=${reads[1]}" def trimmed = meta.single_end ? "out=${prefix}.fastq.gz" : "out1=${prefix}_1.fastq.gz out2=${prefix}_2.fastq.gz" diff --git a/modules/bbmap/bbsplit/main.nf b/modules/bbmap/bbsplit/main.nf index 614a4c02..7a24312b 100644 --- a/modules/bbmap/bbsplit/main.nf +++ b/modules/bbmap/bbsplit/main.nf @@ -30,10 +30,9 @@ process BBMAP_BBSPLIT { tuple val(meta), path('*primary*fastq.gz'), optional:true, emit: primary_fastq tuple val(meta), path('*fastq.gz') , optional:true, emit: all_fastq tuple val(meta), path('*txt') , optional:true, emit: stats - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def avail_mem = 3 diff --git a/modules/bbmap/index/main.nf b/modules/bbmap/index/main.nf index 6f957d03..b9e52ec7 100644 --- a/modules/bbmap/index/main.nf +++ b/modules/bbmap/index/main.nf @@ -23,10 +23,9 @@ process BBMAP_INDEX { output: path 'ref' , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ bbmap.sh \\ ref=${fasta} \\ diff --git a/modules/bcftools/concat/main.nf b/modules/bcftools/concat/main.nf index fab0e83d..48280eea 100644 --- a/modules/bcftools/concat/main.nf +++ b/modules/bcftools/concat/main.nf @@ -23,10 +23,9 @@ process BCFTOOLS_CONCAT { output: tuple val(meta), path("*.gz"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ bcftools concat \\ diff --git a/modules/bcftools/consensus/main.nf b/modules/bcftools/consensus/main.nf index 29758a4b..954b0eb8 100644 --- a/modules/bcftools/consensus/main.nf +++ b/modules/bcftools/consensus/main.nf @@ -23,10 +23,9 @@ process BCFTOOLS_CONSENSUS { output: tuple val(meta), path('*.fa'), emit: fasta - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ cat $fasta | bcftools consensus $vcf $options.args > ${prefix}.fa diff --git a/modules/bcftools/filter/main.nf b/modules/bcftools/filter/main.nf index 37b7e28b..5323e0fb 100644 --- a/modules/bcftools/filter/main.nf +++ b/modules/bcftools/filter/main.nf @@ -23,10 +23,9 @@ process BCFTOOLS_FILTER { output: tuple val(meta), path("*.gz"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bcftools filter \\ diff --git a/modules/bcftools/isec/main.nf b/modules/bcftools/isec/main.nf index f700f35c..cc3e425e 100644 --- a/modules/bcftools/isec/main.nf +++ b/modules/bcftools/isec/main.nf @@ -23,10 +23,9 @@ process BCFTOOLS_ISEC { output: tuple val(meta), path("${prefix}"), emit: results - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bcftools isec \\ diff --git a/modules/bcftools/merge/main.nf b/modules/bcftools/merge/main.nf index 7d8ab670..bb68f184 100644 --- a/modules/bcftools/merge/main.nf +++ b/modules/bcftools/merge/main.nf @@ -23,10 +23,9 @@ process BCFTOOLS_MERGE { output: tuple val(meta), path("*.gz"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bcftools merge -Oz \\ diff --git a/modules/bcftools/mpileup/main.nf b/modules/bcftools/mpileup/main.nf index 1f6eecaa..df8455a5 100644 --- a/modules/bcftools/mpileup/main.nf +++ b/modules/bcftools/mpileup/main.nf @@ -26,10 +26,9 @@ process BCFTOOLS_MPILEUP { tuple val(meta), path("*.gz") , emit: vcf tuple val(meta), path("*.tbi") , emit: tbi tuple val(meta), path("*stats.txt"), emit: stats - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ echo "${meta.id}" > sample_name.list diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf index 454fc1d2..7e506e49 100644 --- a/modules/bcftools/norm/main.nf +++ b/modules/bcftools/norm/main.nf @@ -24,10 +24,9 @@ process BCFTOOLS_NORM { output: tuple val(meta), path("*.gz") , emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ bcftools norm \\ diff --git a/modules/bcftools/query/main.nf b/modules/bcftools/query/main.nf index 4815ae90..dae8bbc4 100644 --- a/modules/bcftools/query/main.nf +++ b/modules/bcftools/query/main.nf @@ -26,10 +26,9 @@ process BCFTOOLS_QUERY { output: tuple val(meta), path("*.gz") , emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" diff --git a/modules/bcftools/reheader/main.nf b/modules/bcftools/reheader/main.nf index a949b6e9..953a8adb 100644 --- a/modules/bcftools/reheader/main.nf +++ b/modules/bcftools/reheader/main.nf @@ -25,10 +25,9 @@ process BCFTOOLS_REHEADER { output: tuple val(meta), path("*.vcf.gz"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def update_sequences = fai ? "-f $fai" : "" def new_header = header ? "-h $header" : "" diff --git a/modules/bcftools/stats/main.nf b/modules/bcftools/stats/main.nf index f5b1f6b1..31bed814 100644 --- a/modules/bcftools/stats/main.nf +++ b/modules/bcftools/stats/main.nf @@ -23,10 +23,9 @@ process BCFTOOLS_STATS { output: tuple val(meta), path("*stats.txt"), emit: stats - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bcftools stats $options.args $vcf > ${prefix}.bcftools_stats.txt diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf index 5a944e89..ef72f081 100644 --- a/modules/bcftools/view/main.nf +++ b/modules/bcftools/view/main.nf @@ -26,10 +26,9 @@ process BCFTOOLS_VIEW { output: tuple val(meta), path("*.gz") , emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" diff --git a/modules/bedtools/bamtobed/main.nf b/modules/bedtools/bamtobed/main.nf index 19986371..71c439d3 100644 --- a/modules/bedtools/bamtobed/main.nf +++ b/modules/bedtools/bamtobed/main.nf @@ -23,10 +23,9 @@ process BEDTOOLS_BAMTOBED { output: tuple val(meta), path("*.bed"), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bedtools \\ diff --git a/modules/bedtools/complement/main.nf b/modules/bedtools/complement/main.nf index 5b3bbea9..77214c64 100644 --- a/modules/bedtools/complement/main.nf +++ b/modules/bedtools/complement/main.nf @@ -24,10 +24,9 @@ process BEDTOOLS_COMPLEMENT { output: tuple val(meta), path('*.bed'), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ bedtools \\ diff --git a/modules/bedtools/genomecov/main.nf b/modules/bedtools/genomecov/main.nf index b5deedf1..9d014466 100644 --- a/modules/bedtools/genomecov/main.nf +++ b/modules/bedtools/genomecov/main.nf @@ -25,10 +25,9 @@ process BEDTOOLS_GENOMECOV { output: tuple val(meta), path("*.${extension}"), emit: genomecov - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (intervals.name =~ /\.bam/) { """ diff --git a/modules/bedtools/getfasta/main.nf b/modules/bedtools/getfasta/main.nf index 72e457dc..b27f6183 100644 --- a/modules/bedtools/getfasta/main.nf +++ b/modules/bedtools/getfasta/main.nf @@ -24,10 +24,9 @@ process BEDTOOLS_GETFASTA { output: path "*.fa" , emit: fasta - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${bed.baseName}${options.suffix}" : "${bed.baseName}" """ bedtools \\ diff --git a/modules/bedtools/intersect/main.nf b/modules/bedtools/intersect/main.nf index b75bd116..1ab0a8b2 100644 --- a/modules/bedtools/intersect/main.nf +++ b/modules/bedtools/intersect/main.nf @@ -24,10 +24,9 @@ process BEDTOOLS_INTERSECT { output: tuple val(meta), path("*.${extension}"), emit: intersect - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bedtools \\ diff --git a/modules/bedtools/makewindows/main.nf b/modules/bedtools/makewindows/main.nf index 5e93f0ae..c9f863d0 100644 --- a/modules/bedtools/makewindows/main.nf +++ b/modules/bedtools/makewindows/main.nf @@ -24,10 +24,9 @@ process BEDTOOLS_MAKEWINDOWS { output: tuple val(meta), path("*.tab"), emit: tab - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def arg_input = use_bed ? "-b $regions" : "-g $regions" """ diff --git a/modules/bedtools/maskfasta/main.nf b/modules/bedtools/maskfasta/main.nf index 67097f3f..8ee33d7a 100644 --- a/modules/bedtools/maskfasta/main.nf +++ b/modules/bedtools/maskfasta/main.nf @@ -24,10 +24,9 @@ process BEDTOOLS_MASKFASTA { output: tuple val(meta), path("*.fa"), emit: fasta - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bedtools \\ diff --git a/modules/bedtools/merge/main.nf b/modules/bedtools/merge/main.nf index ba8348af..92a59f9e 100644 --- a/modules/bedtools/merge/main.nf +++ b/modules/bedtools/merge/main.nf @@ -23,10 +23,9 @@ process BEDTOOLS_MERGE { output: tuple val(meta), path('*.bed'), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ bedtools \\ diff --git a/modules/bedtools/slop/main.nf b/modules/bedtools/slop/main.nf index 6644b8db..4b412b1f 100644 --- a/modules/bedtools/slop/main.nf +++ b/modules/bedtools/slop/main.nf @@ -24,10 +24,9 @@ process BEDTOOLS_SLOP { output: tuple val(meta), path("*.bed"), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bedtools \\ diff --git a/modules/bedtools/sort/main.nf b/modules/bedtools/sort/main.nf index acc4a593..bdba3376 100644 --- a/modules/bedtools/sort/main.nf +++ b/modules/bedtools/sort/main.nf @@ -23,10 +23,9 @@ process BEDTOOLS_SORT { output: tuple val(meta), path('*.bed'), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bedtools \\ diff --git a/modules/bedtools/subtract/main.nf b/modules/bedtools/subtract/main.nf index a8e2ad02..54a12bf4 100644 --- a/modules/bedtools/subtract/main.nf +++ b/modules/bedtools/subtract/main.nf @@ -23,10 +23,9 @@ process BEDTOOLS_SUBTRACT { output: tuple val(meta), path("*.bed"), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bedtools \\ diff --git a/modules/bismark/align/main.nf b/modules/bismark/align/main.nf index ce042933..aa4879ba 100644 --- a/modules/bismark/align/main.nf +++ b/modules/bismark/align/main.nf @@ -26,10 +26,9 @@ process BISMARK_ALIGN { tuple val(meta), path("*bam") , emit: bam tuple val(meta), path("*report.txt"), emit: report tuple val(meta), path("*fq.gz") , optional:true, emit: unmapped - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def fastq = meta.single_end ? reads : "-1 ${reads[0]} -2 ${reads[1]}" """ diff --git a/modules/bismark/deduplicate/main.nf b/modules/bismark/deduplicate/main.nf index 8555563d..c3ff27d6 100644 --- a/modules/bismark/deduplicate/main.nf +++ b/modules/bismark/deduplicate/main.nf @@ -24,10 +24,9 @@ process BISMARK_DEDUPLICATE { output: tuple val(meta), path("*.deduplicated.bam") , emit: bam tuple val(meta), path("*.deduplication_report.txt"), emit: report - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def seqtype = meta.single_end ? 
'-s' : '-p' """ diff --git a/modules/bismark/genomepreparation/main.nf b/modules/bismark/genomepreparation/main.nf index 0a3fae14..0a86173d 100644 --- a/modules/bismark/genomepreparation/main.nf +++ b/modules/bismark/genomepreparation/main.nf @@ -23,10 +23,9 @@ process BISMARK_GENOMEPREPARATION { output: path "BismarkIndex" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ bismark_genome_preparation \\ $options.args \\ diff --git a/modules/bismark/methylationextractor/main.nf b/modules/bismark/methylationextractor/main.nf index bafeaad6..5e89e6f8 100644 --- a/modules/bismark/methylationextractor/main.nf +++ b/modules/bismark/methylationextractor/main.nf @@ -28,11 +28,10 @@ process BISMARK_METHYLATIONEXTRACTOR { tuple val(meta), path("*.cov.gz") , emit: coverage tuple val(meta), path("*_splitting_report.txt"), emit: report tuple val(meta), path("*.M-bias.txt") , emit: mbias - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: def seqtype = meta.single_end ? '-s' : '-p' - def software = getSoftwareName(task.process) """ bismark_methylation_extractor \\ --bedGraph \\ diff --git a/modules/bismark/report/main.nf b/modules/bismark/report/main.nf index d7ab3e01..70c6ba3b 100644 --- a/modules/bismark/report/main.nf +++ b/modules/bismark/report/main.nf @@ -23,10 +23,9 @@ process BISMARK_REPORT { output: tuple val(meta), path("*report.{html,txt}"), emit: report - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ bismark2report $options.args diff --git a/modules/bismark/summary/main.nf b/modules/bismark/summary/main.nf index d71772b3..3d5f294e 100644 --- a/modules/bismark/summary/main.nf +++ b/modules/bismark/summary/main.nf @@ -26,10 +26,9 @@ process BISMARK_SUMMARY { output: path "*report.{html,txt}", emit: summary - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ bismark2summary diff --git a/modules/blast/blastn/main.nf b/modules/blast/blastn/main.nf index 1146ede4..0d65f1d0 100644 --- a/modules/blast/blastn/main.nf +++ b/modules/blast/blastn/main.nf @@ -24,10 +24,9 @@ process BLAST_BLASTN { output: tuple val(meta), path('*.blastn.txt'), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.ndb" | sed 's/.ndb//'` diff --git a/modules/blast/makeblastdb/main.nf b/modules/blast/makeblastdb/main.nf index 9ee02108..0538e0db 100644 --- a/modules/blast/makeblastdb/main.nf +++ b/modules/blast/makeblastdb/main.nf @@ -23,10 +23,9 @@ process BLAST_MAKEBLASTDB { output: path 'blast_db' , emit: db - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ makeblastdb \\ -in $fasta \\ diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 7c71cb82..764b5be2 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -25,11 +25,10 @@ process BOWTIE_ALIGN { output: tuple val(meta), path('*.bam'), emit: bam tuple val(meta), path('*.out'), emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def unaligned = params.save_unaligned ? "--un ${prefix}.unmapped.fastq" : '' def endedness = meta.single_end ? "$reads" : "-1 ${reads[0]} -2 ${reads[1]}" diff --git a/modules/bowtie/build/main.nf b/modules/bowtie/build/main.nf index 3ae07729..1b83541b 100644 --- a/modules/bowtie/build/main.nf +++ b/modules/bowtie/build/main.nf @@ -23,10 +23,9 @@ process BOWTIE_BUILD { output: path 'bowtie' , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ mkdir bowtie bowtie-build --threads $task.cpus $fasta bowtie/${fasta.baseName} diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 4a972373..6f923951 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -25,11 +25,10 @@ process BOWTIE2_ALIGN { output: tuple val(meta), path('*.bam'), emit: bam tuple val(meta), path('*.log'), emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { def unaligned = params.save_unaligned ? "--un-gz ${prefix}.unmapped.fastq.gz" : '' diff --git a/modules/bowtie2/build/main.nf b/modules/bowtie2/build/main.nf index f140d7a4..bc95eea8 100644 --- a/modules/bowtie2/build/main.nf +++ b/modules/bowtie2/build/main.nf @@ -23,10 +23,9 @@ process BOWTIE2_BUILD { output: path 'bowtie2' , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ mkdir bowtie2 bowtie2-build $options.args --threads $task.cpus $fasta bowtie2/${fasta.baseName} diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index ae4ee147..07135aea 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -24,10 +24,9 @@ process BWA_ALN { output: tuple val(meta), path("*.sai"), emit: sai - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { diff --git a/modules/bwa/index/main.nf b/modules/bwa/index/main.nf index 9de3fe0c..479431ed 100644 --- a/modules/bwa/index/main.nf +++ b/modules/bwa/index/main.nf @@ -23,10 +23,9 @@ process BWA_INDEX { output: path "bwa" , emit: index - path "versions.yml", emit: version + path "versions.yml", emit: versions script: - def software = getSoftwareName(task.process) """ mkdir bwa bwa \\ diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index f20e0c39..b6a548d7 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -24,10 +24,9 @@ process BWA_MEM { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index f4519541..38127793 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -24,10 +24,9 @@ process BWA_SAMPE { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_group = meta.read_group ? "-r ${meta.read_group}" : "" diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 5303b24f..68fa95c7 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -24,10 +24,9 @@ process BWA_SAMSE { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_group = meta.read_group ? "-r ${meta.read_group}" : "" diff --git a/modules/bwamem2/index/main.nf b/modules/bwamem2/index/main.nf index 9274ebe8..5732017f 100644 --- a/modules/bwamem2/index/main.nf +++ b/modules/bwamem2/index/main.nf @@ -23,10 +23,9 @@ process BWAMEM2_INDEX { output: path "bwamem2" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ mkdir bwamem2 bwa-mem2 \\ diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index ea584a39..f88d840f 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -24,10 +24,9 @@ process BWAMEM2_MEM { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index d78055fc..9b1d2b86 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -24,10 +24,9 @@ process BWAMETH_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-R ${meta.read_group}" : "" """ diff --git a/modules/bwameth/index/main.nf b/modules/bwameth/index/main.nf index a7a0b783..68fb33d4 100644 --- a/modules/bwameth/index/main.nf +++ b/modules/bwameth/index/main.nf @@ -23,10 +23,9 @@ process BWAMETH_INDEX { output: path "bwameth" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ bwameth.py index $fasta diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index 2dc9944f..dac301cb 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -23,7 +23,7 @@ process CAT_CAT { output: path "${file_out}*" , emit: file_out - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: def file_list = files_in.collect { it.toString() } diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index 712364e1..538915a7 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -23,7 +23,7 @@ process CAT_FASTQ { output: tuple val(meta), path("*.merged.fastq.gz"), emit: reads - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index c7b0a5a3..9826eed1 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -34,10 +34,9 @@ process CHROMAP_CHROMAP { tuple val(meta), path("*.bam") , optional:true, emit: bam tuple val(meta), path("*.tagAlign.gz"), optional:true, emit: tagAlign tuple val(meta), path("*.pairs.gz") , optional:true, emit: pairs - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" def args = options.args.tokenize() diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index 61b7a856..efe85733 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -25,10 +25,9 @@ process CHROMAP_INDEX { output: path "*.index" , emit: index - path "versions.yml", emit: version + path "versions.yml", emit: versions script: - def software = getSoftwareName(task.process) def prefix = fasta.baseName """ chromap \\ diff --git a/modules/cnvkit/main.nf b/modules/cnvkit/main.nf index 1219584c..27c8bb0d 100755 --- a/modules/cnvkit/main.nf +++ b/modules/cnvkit/main.nf @@ -28,10 +28,9 @@ process CNVKIT { tuple val(meta), path("*.cnn"), emit: cnn tuple val(meta), path("*.cnr"), emit: cnr tuple val(meta), path("*.cns"), emit: cns - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ cnvkit.py \\ batch \\ diff --git a/modules/cooler/digest/main.nf b/modules/cooler/digest/main.nf index ee8b347e..5728b649 100644 --- a/modules/cooler/digest/main.nf +++ b/modules/cooler/digest/main.nf @@ -25,10 +25,9 @@ process COOLER_DIGEST { output: path "*.bed" , emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ cooler digest \\ $options.args \\ diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index 7d456107..2028f5f0 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -23,10 +23,9 @@ process COOLER_DUMP { output: tuple val(meta), path("*.bedpe"), emit: bedpe - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ cooler dump \\ diff --git a/modules/custom/dumpsoftwareversions/main.nf b/modules/custom/dumpsoftwareversions/main.nf index 8424ab07..cf10a8e0 100644 --- a/modules/custom/dumpsoftwareversions/main.nf +++ b/modules/custom/dumpsoftwareversions/main.nf @@ -22,9 +22,9 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { path versions output: - path 'software_versions.yml' , emit: yml - path 'software_versions_mqc.yml', emit: mqc_yml - path 'versions.yml' , emit: versions + path "software_versions.yml" , emit: yml + path "software_versions_mqc.yml", emit: mqc_yml + path "versions.yml" , emit: versions script: """ diff --git a/modules/cutadapt/main.nf b/modules/cutadapt/main.nf index 3baf9c7f..32faf2cf 100644 --- a/modules/cutadapt/main.nf +++ b/modules/cutadapt/main.nf @@ -24,10 +24,9 @@ process CUTADAPT { output: tuple val(meta), path('*.trim.fastq.gz'), emit: reads tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def trimmed = meta.single_end ? 
"-o ${prefix}.trim.fastq.gz" : "-o ${prefix}_1.trim.fastq.gz -p ${prefix}_2.trim.fastq.gz" """ diff --git a/modules/damageprofiler/main.nf b/modules/damageprofiler/main.nf index cbb27944..1537b019 100644 --- a/modules/damageprofiler/main.nf +++ b/modules/damageprofiler/main.nf @@ -25,10 +25,9 @@ process DAMAGEPROFILER { output: tuple val(meta), path("${prefix}"), emit: results - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ diff --git a/modules/deeptools/computematrix/main.nf b/modules/deeptools/computematrix/main.nf index 21a18526..9fffdb8e 100644 --- a/modules/deeptools/computematrix/main.nf +++ b/modules/deeptools/computematrix/main.nf @@ -25,10 +25,9 @@ process DEEPTOOLS_COMPUTEMATRIX { output: tuple val(meta), path("*.mat.gz") , emit: matrix tuple val(meta), path("*.mat.tab"), emit: table - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ computeMatrix \\ diff --git a/modules/deeptools/plotfingerprint/main.nf b/modules/deeptools/plotfingerprint/main.nf index 9271a399..b2d167f9 100644 --- a/modules/deeptools/plotfingerprint/main.nf +++ b/modules/deeptools/plotfingerprint/main.nf @@ -25,10 +25,9 @@ process DEEPTOOLS_PLOTFINGERPRINT { tuple val(meta), path("*.pdf") , emit: pdf tuple val(meta), path("*.raw.txt") , emit: matrix tuple val(meta), path("*.qcmetrics.txt"), emit: metrics - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def extend = (meta.single_end && params.fragment_size > 0) ? "--extendReads ${params.fragment_size}" : '' """ diff --git a/modules/deeptools/plotheatmap/main.nf b/modules/deeptools/plotheatmap/main.nf index 49362666..19c243df 100644 --- a/modules/deeptools/plotheatmap/main.nf +++ b/modules/deeptools/plotheatmap/main.nf @@ -24,10 +24,9 @@ process DEEPTOOLS_PLOTHEATMAP { output: tuple val(meta), path("*.pdf"), emit: pdf tuple val(meta), path("*.tab"), emit: table - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ plotHeatmap \\ diff --git a/modules/deeptools/plotprofile/main.nf b/modules/deeptools/plotprofile/main.nf index cba8e161..3a196bd5 100644 --- a/modules/deeptools/plotprofile/main.nf +++ b/modules/deeptools/plotprofile/main.nf @@ -24,10 +24,9 @@ process DEEPTOOLS_PLOTPROFILE { output: tuple val(meta), path("*.pdf"), emit: pdf tuple val(meta), path("*.tab"), emit: table - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ plotProfile \\ diff --git a/modules/delly/call/main.nf b/modules/delly/call/main.nf index 0688949e..59979dc9 100644 --- a/modules/delly/call/main.nf +++ b/modules/delly/call/main.nf @@ -26,10 +26,9 @@ process DELLY_CALL { output: tuple val(meta), path("*.bcf"), emit: bcf tuple val(meta), path("*.csi"), emit: csi - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ delly \\ diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 556f150c..6afc66c4 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -26,10 +26,9 @@ process DIAMOND_BLASTP { output: tuple val(meta), path('*.txt'), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index 8b0227a2..db2953da 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -26,10 +26,9 @@ process DIAMOND_BLASTX { output: tuple val(meta), path('*.txt'), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` diff --git a/modules/diamond/makedb/main.nf b/modules/diamond/makedb/main.nf index 27383955..e4533f8f 100644 --- a/modules/diamond/makedb/main.nf +++ b/modules/diamond/makedb/main.nf @@ -25,10 +25,9 @@ process DIAMOND_MAKEDB { output: path "${fasta}.dmnd", emit: db - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ diamond \\ makedb \\ diff --git a/modules/dragonflye/main.nf b/modules/dragonflye/main.nf index 090c9a13..f9dc9004 100644 --- a/modules/dragonflye/main.nf +++ b/modules/dragonflye/main.nf @@ -27,10 +27,9 @@ process DRAGONFLYE { tuple val(meta), path("{flye,miniasm,raven}.fasta") , emit: raw_contigs tuple val(meta), path("{miniasm,raven}-unpolished.gfa"), optional:true , emit: gfa tuple val(meta), path("flye-info.txt"), optional:true , emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def memory = task.memory.toGiga() """ dragonflye \\ diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index 6016f777..84f59e89 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -23,10 +23,9 @@ process DSHBIO_EXPORTSEGMENTS { output: tuple val(meta), path("*.fa"), emit: fasta - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ dsh-bio \\ diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 3f2a068d..35039f21 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -23,10 +23,9 @@ process DSHBIO_FILTERBED { output: tuple val(meta), path("*.bed.gz"), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ dsh-bio \\ diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index 2a1ad816..bf677da8 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -23,10 +23,9 @@ process DSHBIO_FILTERGFF3 { output: tuple val(meta), path("*.gff3.gz"), emit: gff3 - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ dsh-bio \\ diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 388ba0ef..3e8d656c 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -23,10 +23,9 @@ process DSHBIO_SPLITBED { output: tuple val(meta), path("*.bed.gz"), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ dsh-bio \\ diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index b8f81392..dd477181 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -23,10 +23,9 @@ process DSHBIO_SPLITGFF3 { output: tuple val(meta), path("*.gff3.gz"), emit: gff3 - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ dsh-bio \\ diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 17eaf720..ad9c38a6 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -33,10 +33,9 @@ process ENSEMBLVEP { output: tuple val(meta), path("*.ann.vcf"), emit: vcf path "*.summary.html" , emit: report - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" dir_cache = params.use_cache ? "\${PWD}/${cache}" : "/.vep" """ diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 1c02f404..845de15d 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -25,10 +25,9 @@ process EXPANSIONHUNTER { output: tuple val(meta), path("*.vcf"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def gender = (meta.gender == 'male' || meta.gender == 1 || meta.gender == 'XY') ? 
"male" : "female" """ diff --git a/modules/fastani/main.nf b/modules/fastani/main.nf index 7ee35a0d..5c6366f9 100644 --- a/modules/fastani/main.nf +++ b/modules/fastani/main.nf @@ -24,10 +24,9 @@ process FASTANI { output: tuple val(meta), path("*.ani.txt"), emit: ani - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.batch_input) { diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index 11cd30b4..e99540d5 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -28,13 +28,12 @@ process FASTP { tuple val(meta), path('*.json') , emit: json tuple val(meta), path('*.html') , emit: html tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path('*.fail.fastq.gz') , optional:true, emit: reads_fail tuple val(meta), path('*.merged.fastq.gz'), optional:true, emit: reads_merged script: // Added soft-links to original fastqs for consistent naming in MultiQC - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { def fail_fastq = save_trimmed_fail ? "--failed_out ${prefix}.fail.fastq.gz" : '' diff --git a/modules/fastqc/main.nf b/modules/fastqc/main.nf index 88bfbf5b..9f6cfc55 100644 --- a/modules/fastqc/main.nf +++ b/modules/fastqc/main.nf @@ -24,11 +24,11 @@ process FASTQC { output: tuple val(meta), path("*.html"), emit: html tuple val(meta), path("*.zip") , emit: zip - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: // Add soft-links to original FastQs for consistent naming in pipeline - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz @@ -36,7 +36,7 @@ process FASTQC { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) END_VERSIONS """ } else { @@ -47,7 +47,7 @@ process FASTQC { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) END_VERSIONS """ } diff --git a/modules/fasttree/main.nf b/modules/fasttree/main.nf index 08c093b2..5f81d1f2 100644 --- a/modules/fasttree/main.nf +++ b/modules/fasttree/main.nf @@ -22,10 +22,9 @@ process FASTTREE { output: path "*.tre", emit: phylogeny - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ fasttree \\ $options.args \\ diff --git a/modules/fgbio/callmolecularconsensusreads/main.nf b/modules/fgbio/callmolecularconsensusreads/main.nf index a3d047a7..23056b90 100644 --- a/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/modules/fgbio/callmolecularconsensusreads/main.nf @@ -22,10 +22,9 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ fgbio \\ diff --git a/modules/fgbio/sortbam/main.nf b/modules/fgbio/sortbam/main.nf index 928765f5..34e0b377 100644 --- a/modules/fgbio/sortbam/main.nf +++ b/modules/fgbio/sortbam/main.nf @@ -22,10 +22,9 @@ process FGBIO_SORTBAM { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ fgbio \\ diff --git a/modules/flash/main.nf b/modules/flash/main.nf index f9a381e9..912b2961 100644 --- a/modules/flash/main.nf +++ b/modules/flash/main.nf @@ -22,10 +22,9 @@ process FLASH { output: tuple val(meta), path("*.fastq.gz"), emit: reads - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ flash \\ diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index 91c23b29..e804bcff 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -27,10 +27,9 @@ process GATK4_APPLYBQSR { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" """ diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 2f368014..6033fbf1 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -29,10 +29,9 @@ process GATK4_BASERECALIBRATOR { output: tuple val(meta), path("*.table"), emit: table - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def intervalsCommand = intervalsBed ? "-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 28b88f5b..064247cc 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -24,10 +24,9 @@ process GATK4_BEDTOINTERVALLIST { output: tuple val(meta), path('*.interval_list'), emit: interval_list - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ gatk BedToIntervalList \\ diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index b384d405..12372bdf 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -23,10 +23,9 @@ process GATK4_CREATESEQUENCEDICTIONARY { output: path "*.dict" , emit: dict - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def avail_mem = 6 if (!task.memory) { log.info '[GATK] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' 
diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index cb8ec0ea..ebd081ac 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -23,10 +23,9 @@ process GATK4_FASTQTOSAM { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_files = meta.single_end ? "-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" """ diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 782b7653..09449f12 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -26,10 +26,9 @@ process GATK4_GETPILEUPSUMMARIES { output: tuple val(meta), path('*.pileups.table'), emit: table - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def sitesCommand = '' diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 63771393..01b71ccb 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -27,10 +27,9 @@ process GATK4_HAPLOTYPECALLER { output: tuple val(meta), path("*.vcf.gz"), emit: vcf tuple val(meta), path("*.tbi") , emit: tbi - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 99257354..2f464919 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -23,10 +23,9 @@ process GATK4_INTERVALLISTTOOLS { output: tuple val(meta), path("*_split/*/*.interval_list"), emit: interval_list - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index 3a3c8e70..8f94f4dd 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -24,10 +24,9 @@ process GATK4_MARKDUPLICATES { output: tuple val(meta), path("*.bam") , emit: bam tuple val(meta), path("*.metrics"), emit: metrics - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ gatk MarkDuplicates \\ diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 978b7cff..0c9fe5ee 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -26,10 +26,9 @@ process GATK4_MERGEBAMALIGNMENT { output: tuple val(meta), path('*.bam'), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ gatk MergeBamAlignment \\ diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index c62a6289..ce9a52c3 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -25,10 +25,9 @@ process GATK4_MERGEVCFS { output: tuple val(meta), path('*.vcf.gz'), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" // Make list of VCFs to merge diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index c4efc724..9b3f8b3f 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -35,10 +35,9 @@ process GATK4_MUTECT2 { tuple val(meta), path("*.tbi") , emit: tbi tuple val(meta), path("*.stats") , emit: stats tuple val(meta), path("*.f1r2.tar.gz"), optional:true, emit: f1r2 - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def inputsList = [] def normalsList = [] diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 0a95b604..b3c9085a 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -23,10 +23,9 @@ process GATK4_REVERTSAM { output: tuple val(meta), path('*.bam'), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ gatk RevertSam \\ diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index eed7a83f..324f3bae 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -23,10 +23,9 @@ process GATK4_SAMTOFASTQ { output: tuple val(meta), path('*.fastq.gz'), emit: fastq - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def output = meta.single_end ? "FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" """ diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 0c4ba163..793cc671 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -24,10 +24,9 @@ process GATK4_SPLITNCIGARREADS { output: tuple val(meta), path('*.bam'), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ gatk SplitNCigarReads \\ diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index a79bce8f..28084645 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -26,11 +26,10 @@ process GATK4_VARIANTFILTRATION { output: tuple val(meta), path("*.vcf"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ gatk VariantFiltration \\ diff --git a/modules/genmap/index/main.nf b/modules/genmap/index/main.nf index f1168d4e..c79596f0 100644 --- a/modules/genmap/index/main.nf +++ b/modules/genmap/index/main.nf @@ -23,10 +23,9 @@ process GENMAP_INDEX { output: path "genmap" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ genmap \\ index \\ diff --git a/modules/genmap/mappability/main.nf b/modules/genmap/mappability/main.nf index 9eeb4253..4d858cbb 100644 --- a/modules/genmap/mappability/main.nf +++ b/modules/genmap/mappability/main.nf @@ -25,10 +25,9 @@ process GENMAP_MAPPABILITY { path "*.wig" , optional:true, emit: wig path "*.bedgraph" , optional:true, emit: bedgraph path "*.txt" , optional:true, emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ genmap \\ map \\ diff --git a/modules/gffread/main.nf b/modules/gffread/main.nf index 1622e98d..4133ee08 100644 --- a/modules/gffread/main.nf +++ b/modules/gffread/main.nf @@ -23,10 +23,9 @@ process GFFREAD { output: path "*.gtf" , emit: gtf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${gff.baseName}${options.suffix}" : "${gff.baseName}" """ gffread \\ diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index 5cff088b..1384334f 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -23,10 +23,9 @@ process GLNEXUS { output: tuple val(meta), path("*.bcf"), emit: bcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" // Make list of GVCFs to merge diff --git a/modules/graphmap2/align/main.nf b/modules/graphmap2/align/main.nf index 30d6cbfd..831b0b3b 100644 --- a/modules/graphmap2/align/main.nf +++ b/modules/graphmap2/align/main.nf @@ -26,10 +26,9 @@ process GRAPHMAP2_ALIGN { output: tuple val(meta), path("*.sam"), emit: sam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ graphmap2 \\ diff --git a/modules/graphmap2/index/main.nf b/modules/graphmap2/index/main.nf index 194c3594..a8b03074 100644 --- a/modules/graphmap2/index/main.nf +++ b/modules/graphmap2/index/main.nf @@ -22,10 +22,9 @@ process GRAPHMAP2_INDEX { output: path "*.gmidx" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ graphmap2 \\ align \\ diff --git a/modules/gubbins/main.nf b/modules/gubbins/main.nf index 10117ae7..da194906 100644 --- a/modules/gubbins/main.nf +++ b/modules/gubbins/main.nf @@ -30,10 +30,9 @@ process GUBBINS { path "*.branch_base_reconstruction.embl", emit: embl_branch path "*.final_tree.tre" , emit: tree path "*.node_labelled.final_tree.tre" , emit: tree_labelled - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ run_gubbins.py \\ --threads $task.cpus \\ diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index 6a2287b6..aec4569f 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -23,10 +23,9 @@ process GUNZIP { output: path "$gunzip", emit: gunzip - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) gunzip = archive.toString() - '.gz' """ gunzip \\ diff --git a/modules/hifiasm/main.nf b/modules/hifiasm/main.nf index 2597afa9..9dfc9618 100644 --- a/modules/hifiasm/main.nf +++ b/modules/hifiasm/main.nf @@ -34,10 +34,9 @@ process HIFIASM { tuple val(meta), path("*.asm.a_ctg.gfa") , emit: alternate_contigs, optional: true tuple val(meta), path("*.hap1.p_ctg.gfa") , emit: paternal_contigs , optional: true tuple val(meta), path("*.hap2.p_ctg.gfa") , emit: maternal_contigs , optional: true - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (use_parental_kmers) { """ diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 583ddc3f..9b73216b 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -28,12 +28,11 @@ process HISAT2_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam tuple val(meta), path("*.log"), emit: summary - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path("*fastq.gz"), optional:true, emit: fastq script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def strandedness = '' diff --git a/modules/hisat2/build/main.nf b/modules/hisat2/build/main.nf index ae24a6aa..015f6f59 100644 --- a/modules/hisat2/build/main.nf +++ b/modules/hisat2/build/main.nf @@ -28,7 +28,7 @@ process HISAT2_BUILD { output: path "hisat2" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: def avail_mem = 0 @@ -53,7 +53,6 @@ process HISAT2_BUILD { log.info "[HISAT2 index build] Use --hisat2_build_memory [small number] to skip this check." 
} - def software = getSoftwareName(task.process) """ mkdir hisat2 $extract_exons diff --git a/modules/hisat2/extractsplicesites/main.nf b/modules/hisat2/extractsplicesites/main.nf index 3387cbd1..1c8b7830 100644 --- a/modules/hisat2/extractsplicesites/main.nf +++ b/modules/hisat2/extractsplicesites/main.nf @@ -25,10 +25,9 @@ process HISAT2_EXTRACTSPLICESITES { output: path "*.splice_sites.txt", emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ hisat2_extract_splice_sites.py $gtf > ${gtf.baseName}.splice_sites.txt cat <<-END_VERSIONS > versions.yml diff --git a/modules/hmmer/hmmalign/main.nf b/modules/hmmer/hmmalign/main.nf index a4166fcb..b4292feb 100644 --- a/modules/hmmer/hmmalign/main.nf +++ b/modules/hmmer/hmmalign/main.nf @@ -24,10 +24,9 @@ process HMMER_HMMALIGN { output: tuple val(meta), path("*.sthlm.gz"), emit: sthlm - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def fastacmd = fasta.getExtension() == 'gz' ? "gunzip -c $fasta" : "cat $fasta" """ diff --git a/modules/homer/annotatepeaks/main.nf b/modules/homer/annotatepeaks/main.nf index 198ae1fe..1714644b 100644 --- a/modules/homer/annotatepeaks/main.nf +++ b/modules/homer/annotatepeaks/main.nf @@ -27,10 +27,9 @@ process HOMER_ANNOTATEPEAKS { output: tuple val(meta), path("*annotatePeaks.txt"), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ annotatePeaks.pl \\ diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf index fe8399a1..2e0b6db9 100644 --- a/modules/homer/findpeaks/main.nf +++ b/modules/homer/findpeaks/main.nf @@ -25,10 +25,9 @@ process HOMER_FINDPEAKS { output: tuple val(meta), path("*peaks.txt"), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index daf0ce60..4f531e82 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -26,10 +26,9 @@ process HOMER_MAKETAGDIRECTORY { output: tuple val(meta), path("tag_dir"), emit: tagdir - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ makeTagDirectory \\ diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf index 5b23e243..c56da24b 100644 --- a/modules/homer/makeucscfile/main.nf +++ b/modules/homer/makeucscfile/main.nf @@ -25,10 +25,9 @@ process HOMER_MAKEUCSCFILE { output: tuple val(meta), path("tag_dir/*ucsc.bedGraph.gz"), emit: bedGraph - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ makeUCSCfile \\ diff --git a/modules/iqtree/main.nf b/modules/iqtree/main.nf index 357faf33..bec879df 100644 --- a/modules/iqtree/main.nf +++ b/modules/iqtree/main.nf @@ -24,10 +24,9 @@ process IQTREE { output: path "*.treefile", emit: phylogeny - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def fconst_args = constant_sites ? "-fconst $constant_sites" : '' def memory = task.memory.toString().replaceAll(' ', '') """ diff --git a/modules/ivar/consensus/main.nf b/modules/ivar/consensus/main.nf index b29450b7..33fa11f7 100644 --- a/modules/ivar/consensus/main.nf +++ b/modules/ivar/consensus/main.nf @@ -26,10 +26,9 @@ process IVAR_CONSENSUS { tuple val(meta), path("*.fa") , emit: fasta tuple val(meta), path("*.qual.txt"), emit: qual tuple val(meta), path("*.mpileup") , optional:true, emit: mpileup - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" """ diff --git a/modules/ivar/trim/main.nf b/modules/ivar/trim/main.nf index 2a698249..6cf8171c 100644 --- a/modules/ivar/trim/main.nf +++ b/modules/ivar/trim/main.nf @@ -25,10 +25,9 @@ process IVAR_TRIM { output: tuple val(meta), path("*.bam"), emit: bam tuple val(meta), path('*.log'), emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ ivar trim \\ diff --git a/modules/ivar/variants/main.nf b/modules/ivar/variants/main.nf index 2bf82a37..d079a8e9 100644 --- a/modules/ivar/variants/main.nf +++ b/modules/ivar/variants/main.nf @@ -26,10 +26,9 @@ process IVAR_VARIANTS { output: tuple val(meta), path("*.tsv") , emit: tsv tuple val(meta), path("*.mpileup"), optional:true, emit: mpileup - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" def features = params.gff ? "-g $gff" : "" diff --git a/modules/kallisto/index/main.nf b/modules/kallisto/index/main.nf index 801f339e..96457b6d 100644 --- a/modules/kallisto/index/main.nf +++ b/modules/kallisto/index/main.nf @@ -23,10 +23,9 @@ process KALLISTO_INDEX { output: path "kallisto" , emit: idx - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ kallisto \\ index \\ diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index b0dd3a06..8c705e51 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -29,10 +29,9 @@ process KALLISTOBUSTOOLS_COUNT { output: tuple val(meta), path ("*.count"), emit: count - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def cdna = t1c ? "-c1 $t1c" : '' def introns = t2c ? 
"-c2 $t2c" : '' diff --git a/modules/kallistobustools/ref/main.nf b/modules/kallistobustools/ref/main.nf index c8e02687..a8287498 100644 --- a/modules/kallistobustools/ref/main.nf +++ b/modules/kallistobustools/ref/main.nf @@ -24,7 +24,7 @@ process KALLISTOBUSTOOLS_REF { val workflow output: - path "versions.yml" , emit: version + path "versions.yml" , emit: versions path "kb_ref_out.idx" , emit: index path "t2g.txt" , emit: t2g path "cdna.fa" , emit: cdna @@ -33,7 +33,6 @@ process KALLISTOBUSTOOLS_REF { path "intron_t2c.txt" , optional:true, emit: intron_t2c script: - def software = getSoftwareName(task.process) if (workflow == "standard") { """ kb \\ diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf index 5a4be104..5bb76ad0 100644 --- a/modules/kleborate/main.nf +++ b/modules/kleborate/main.nf @@ -23,10 +23,9 @@ process KLEBORATE { output: tuple val(meta), path("*.txt"), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ kleborate \\ diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index cc269e98..0d4e5840 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -26,10 +26,9 @@ process KRAKEN2_KRAKEN2 { tuple val(meta), path('*classified*') , emit: classified tuple val(meta), path('*unclassified*'), emit: unclassified tuple val(meta), path('*report.txt') , emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def paired = meta.single_end ? "" : "--paired" def classified = meta.single_end ? "${prefix}.classified.fastq" : "${prefix}.classified#.fastq" diff --git a/modules/last/dotplot/main.nf b/modules/last/dotplot/main.nf index ca30bbff..d02e98ad 100644 --- a/modules/last/dotplot/main.nf +++ b/modules/last/dotplot/main.nf @@ -25,10 +25,9 @@ process LAST_DOTPLOT { output: tuple val(meta), path("*.gif"), optional:true, emit: gif tuple val(meta), path("*.png"), optional:true, emit: png - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ last-dotplot \\ diff --git a/modules/last/lastal/main.nf b/modules/last/lastal/main.nf index 3d6518a4..c4335f25 100644 --- a/modules/last/lastal/main.nf +++ b/modules/last/lastal/main.nf @@ -24,10 +24,9 @@ process LAST_LASTAL { output: tuple val(meta), path("*.maf.gz"), emit: maf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def trained_params = param_file ? "-p ${param_file}" : '' """ diff --git a/modules/last/lastdb/main.nf b/modules/last/lastdb/main.nf index ac552e7d..fb765ada 100644 --- a/modules/last/lastdb/main.nf +++ b/modules/last/lastdb/main.nf @@ -23,10 +23,9 @@ process LAST_LASTDB { output: tuple val(meta), path("lastdb"), emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ mkdir lastdb diff --git a/modules/last/mafconvert/main.nf b/modules/last/mafconvert/main.nf index e112cbd8..5e259109 100644 --- a/modules/last/mafconvert/main.nf +++ b/modules/last/mafconvert/main.nf @@ -32,10 +32,9 @@ process LAST_MAFCONVERT { tuple val(meta), path("*.psl.gz"), optional:true, emit: psl_gz tuple val(meta), path("*.sam.gz"), optional:true, emit: sam_gz tuple val(meta), path("*.tab.gz"), optional:true, emit: tab_gz - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ maf-convert $options.args $format $maf | gzip --no-name \\ diff --git a/modules/last/mafswap/main.nf b/modules/last/mafswap/main.nf index f597693c..5ce38c92 100644 --- a/modules/last/mafswap/main.nf +++ b/modules/last/mafswap/main.nf @@ -23,10 +23,9 @@ process LAST_MAFSWAP { output: tuple val(meta), path("*.maf.gz"), emit: maf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ maf-swap $options.args $maf | gzip --no-name > ${prefix}.swapped.maf.gz diff --git a/modules/last/postmask/main.nf b/modules/last/postmask/main.nf index d3fa02e3..3102fbe6 100644 --- a/modules/last/postmask/main.nf +++ b/modules/last/postmask/main.nf @@ -23,10 +23,9 @@ process LAST_POSTMASK { output: tuple val(meta), path("*.maf.gz"), emit: maf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if( "$maf" == "${prefix}.maf.gz" ) error "Input and output names are the same, use the suffix option to disambiguate" """ diff --git a/modules/last/split/main.nf b/modules/last/split/main.nf index 78d59ed4..2a9e5621 100644 --- a/modules/last/split/main.nf +++ b/modules/last/split/main.nf @@ -23,10 +23,9 @@ process LAST_SPLIT { output: tuple val(meta), path("*.maf.gz"), emit: maf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ zcat < $maf | last-split $options.args | gzip --no-name > ${prefix}.maf.gz diff --git a/modules/last/train/main.nf b/modules/last/train/main.nf index 39728ced..f0b958bc 100644 --- a/modules/last/train/main.nf +++ b/modules/last/train/main.nf @@ -24,10 +24,9 @@ process LAST_TRAIN { output: tuple val(meta), path("*.par"), emit: param_file - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) diff --git a/modules/lima/main.nf b/modules/lima/main.nf index 1ff5ac48..16525953 100644 --- a/modules/lima/main.nf +++ b/modules/lima/main.nf @@ -28,7 +28,7 @@ process LIMA { tuple val(meta), path("*.guess") , emit: guess tuple val(meta), path("*.report") , emit: report tuple val(meta), path("*.summary"), emit: summary - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path("*.bam") , optional: true, emit: bam tuple val(meta), path("*.bam.pbi") , optional: true, emit: pbi @@ -41,7 +41,6 @@ process LIMA { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - """ OUT_EXT="" @@ -67,7 +66,7 @@ process LIMA { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - lima: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) + ${getSoftwareName(task.process)}: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) END_VERSIONS """ } diff --git a/modules/lofreq/call/main.nf b/modules/lofreq/call/main.nf index 9fb113ff..e77d7a78 100644 --- a/modules/lofreq/call/main.nf +++ b/modules/lofreq/call/main.nf @@ -24,10 +24,9 @@ process LOFREQ_CALL { output: tuple val(meta), path("*.vcf.gz"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ lofreq \\ diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index 42400793..a86748d7 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -25,10 +25,9 @@ process LOFREQ_CALLPARALLEL { output: tuple val(meta), path("*.vcf.gz"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ lofreq \\ diff --git a/modules/lofreq/filter/main.nf b/modules/lofreq/filter/main.nf index 09c91c8c..905a961d 100644 --- a/modules/lofreq/filter/main.nf +++ b/modules/lofreq/filter/main.nf @@ -23,10 +23,9 @@ process LOFREQ_FILTER { output: tuple val(meta), path("*.gz"), emit: vcf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ lofreq \\ diff --git a/modules/lofreq/indelqual/main.nf b/modules/lofreq/indelqual/main.nf index 78466574..b33a1e04 100644 --- a/modules/lofreq/indelqual/main.nf +++ b/modules/lofreq/indelqual/main.nf @@ -23,10 +23,9 @@ process LOFREQ_INDELQUAL { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ lofreq indelqual \\ diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index 4fcd6b05..d54d406d 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -25,14 +25,13 @@ process MACS2_CALLPEAK { output: tuple val(meta), path("*.{narrowPeak,broadPeak}"), emit: peak tuple val(meta), path("*.xls") , emit: xls - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path("*.gappedPeak"), optional:true, emit: gapped tuple val(meta), path("*.bed") , optional:true, emit: bed tuple val(meta), path("*.bdg") , optional:true, emit: bdg script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def format = meta.single_end ? 'BAM' : 'BAMPE' def control = controlbam ? "--control $controlbam" : '' diff --git a/modules/malt/build/main.nf b/modules/malt/build/main.nf index 3b494c0c..48259a50 100644 --- a/modules/malt/build/main.nf +++ b/modules/malt/build/main.nf @@ -26,11 +26,10 @@ process MALT_BUILD { output: path "malt_index/" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions path "malt-build.log", emit: log script: - def software = getSoftwareName(task.process) def avail_mem = 6 if (!task.memory) { log.info '[MALT_BUILD] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf index 689dabf4..bc78de8c 100644 --- a/modules/malt/run/main.nf +++ b/modules/malt/run/main.nf @@ -27,10 +27,9 @@ process MALT_RUN { path "*.rma6" , emit: rma6 path "*.{tab,text,sam}", optional:true, emit: alignments path "*.log" , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def avail_mem = 6 if (!task.memory) { log.info '[MALT_RUN] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' diff --git a/modules/maltextract/main.nf b/modules/maltextract/main.nf index 426a9fc3..d909ec96 100644 --- a/modules/maltextract/main.nf +++ b/modules/maltextract/main.nf @@ -25,10 +25,9 @@ process MALTEXTRACT { output: path "results" , emit: results - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ MaltExtract \\ -Xmx${task.memory.toGiga()}g \\ diff --git a/modules/mash/sketch/main.nf b/modules/mash/sketch/main.nf index ed018b1a..7a99cc50 100644 --- a/modules/mash/sketch/main.nf +++ b/modules/mash/sketch/main.nf @@ -22,10 +22,9 @@ process MASH_SKETCH { output: tuple val(meta), path("*.msh") , emit: mash tuple val(meta), path("*.mash_stats") , emit: stats - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ mash \\ diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index 8893c2ab..c5157b66 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -26,10 +26,9 @@ process METAPHLAN3 { tuple val(meta), path("*_profile.txt") , emit: profile tuple val(meta), path("*.biom") , emit: biom tuple val(meta), path('*.bowtie2out.txt'), optional:true, emit: bt2out - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def input_type = ("$input".endsWith(".fastq.gz")) ? "--input_type fastq" : ("$input".contains(".fasta")) ? "--input_type fasta" : ("$input".endsWith(".bowtie2out.txt")) ? "--input_type bowtie2out" : "--input_type sam" def input_data = ("$input_type".contains("fastq")) && !meta.single_end ? "${input[0]},${input[1]}" : "$input" diff --git a/modules/methyldackel/extract/main.nf b/modules/methyldackel/extract/main.nf index 149f4aa0..94e4b379 100644 --- a/modules/methyldackel/extract/main.nf +++ b/modules/methyldackel/extract/main.nf @@ -25,10 +25,9 @@ process METHYLDACKEL_EXTRACT { output: tuple val(meta), path("*.bedGraph"), emit: bedgraph - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ MethylDackel extract \\ $options.args \\ diff --git a/modules/methyldackel/mbias/main.nf b/modules/methyldackel/mbias/main.nf index 9fa39b82..c8fd2fa2 100644 --- a/modules/methyldackel/mbias/main.nf +++ b/modules/methyldackel/mbias/main.nf @@ -25,10 +25,9 @@ process METHYLDACKEL_MBIAS { output: tuple val(meta), path("*.mbias.txt"), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ MethylDackel mbias \\ diff --git a/modules/minia/main.nf b/modules/minia/main.nf index 9ab344fd..140ef9e7 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -25,10 +25,9 @@ process MINIA { tuple val(meta), path('*.contigs.fa'), emit: contigs tuple val(meta), path('*.unitigs.fa'), emit: unitigs tuple val(meta), path('*.h5') , emit: h5 - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def read_list = reads.join(",") """ diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index d0ff9c0f..215e4fb5 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -24,10 +24,9 @@ process MINIMAP2_ALIGN { output: tuple val(meta), path("*.paf"), emit: paf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def input_reads = meta.single_end ? 
"$reads" : "${reads[0]} ${reads[1]}" """ diff --git a/modules/minimap2/index/main.nf b/modules/minimap2/index/main.nf index cfc40417..b154a649 100644 --- a/modules/minimap2/index/main.nf +++ b/modules/minimap2/index/main.nf @@ -22,10 +22,9 @@ process MINIMAP2_INDEX { output: path "*.mmi" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ minimap2 \\ -t $task.cpus \\ diff --git a/modules/mosdepth/main.nf b/modules/mosdepth/main.nf index c21ea2a8..8fe3cfee 100644 --- a/modules/mosdepth/main.nf +++ b/modules/mosdepth/main.nf @@ -31,10 +31,9 @@ process MOSDEPTH { tuple val(meta), path('*.per-base.bed.gz.csi'), emit: per_base_csi tuple val(meta), path('*.regions.bed.gz') , emit: regions_bed tuple val(meta), path('*.regions.bed.gz.csi') , emit: regions_csi - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def interval = window_size ? "--by ${window_size}" : "--by ${bed}" """ diff --git a/modules/msisensor/msi/main.nf b/modules/msisensor/msi/main.nf index 41f79b3a..bd5a0a0e 100644 --- a/modules/msisensor/msi/main.nf +++ b/modules/msisensor/msi/main.nf @@ -26,10 +26,9 @@ process MSISENSOR_MSI { tuple val(meta), path("${prefix}_dis") , emit: output_dis tuple val(meta), path("${prefix}_germline"), emit: output_germline tuple val(meta), path("${prefix}_somatic") , emit: output_somatic - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ msisensor \\ diff --git a/modules/msisensor/scan/main.nf b/modules/msisensor/scan/main.nf index 198657ae..ebd8785a 100644 --- a/modules/msisensor/scan/main.nf +++ b/modules/msisensor/scan/main.nf @@ -22,12 +22,11 @@ process MSISENSOR_SCAN { tuple val(meta), path(fasta) output: - tuple (val(meta), path("*.tab"), emit: txt) - path ("versions.yml" , emit: version) + tuple val(meta), path("*.tab"), emit: txt + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ msisensor \\ scan \\ diff --git a/modules/multiqc/main.nf b/modules/multiqc/main.nf index 2e7ad932..0861aa59 100644 --- a/modules/multiqc/main.nf +++ b/modules/multiqc/main.nf @@ -24,16 +24,15 @@ process MULTIQC { path "*multiqc_report.html", emit: report path "*_data" , emit: data path "*_plots" , optional:true, emit: plots - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ multiqc -f $options.args . 
cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + ${getSoftwareName(task.process)}: \$( multiqc --version | sed -e "s/multiqc, version //g" ) END_VERSIONS """ } diff --git a/modules/muscle/main.nf b/modules/muscle/main.nf index ef9bf484..6ffb97ac 100644 --- a/modules/muscle/main.nf +++ b/modules/muscle/main.nf @@ -30,10 +30,9 @@ process MUSCLE { tuple val(meta), path("*.msf") , optional: true, emit: msf tuple val(meta), path("*.tree"), optional: true, emit: tree path "*.log" , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def fasta_out = options.args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' def clw_out = options.args.contains('-clw') ? "-clwout ${prefix}_muscle_msa.clw" : '' diff --git a/modules/nanolyse/main.nf b/modules/nanolyse/main.nf index 84cf579a..271592f7 100644 --- a/modules/nanolyse/main.nf +++ b/modules/nanolyse/main.nf @@ -25,10 +25,9 @@ process NANOLYSE { output: tuple val(meta), path("*.fastq.gz"), emit: fastq path "*.log" , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ gunzip -c $fastq | NanoLyse -r $fasta | gzip > ${prefix}.fastq.gz diff --git a/modules/nanoplot/main.nf b/modules/nanoplot/main.nf index e36b2da2..16e2248c 100644 --- a/modules/nanoplot/main.nf +++ b/modules/nanoplot/main.nf @@ -26,10 +26,9 @@ process NANOPLOT { tuple val(meta), path("*.png") , emit: png tuple val(meta), path("*.txt") , emit: txt tuple val(meta), path("*.log") , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def input_file = ("$ontfile".endsWith(".fastq.gz")) ? "--fastq ${ontfile}" : ("$ontfile".endsWith(".txt")) ? "--summary ${ontfile}" : '' """ diff --git a/modules/nextclade/main.nf b/modules/nextclade/main.nf index fabf4520..6fc6efc4 100755 --- a/modules/nextclade/main.nf +++ b/modules/nextclade/main.nf @@ -27,10 +27,9 @@ process NEXTCLADE { tuple val(meta), path("${prefix}.tree.json") , emit: json_tree tuple val(meta), path("${prefix}.tsv") , emit: tsv tuple val(meta), path("${prefix}.clades.tsv"), optional:true, emit: tsv_clades - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ nextclade \\ diff --git a/modules/optitype/main.nf b/modules/optitype/main.nf index 4f136d7c..083b03a7 100644 --- a/modules/optitype/main.nf +++ b/modules/optitype/main.nf @@ -23,10 +23,9 @@ process OPTITYPE { output: tuple val(meta), path("${prefix}"), emit: output - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ diff --git a/modules/pairix/main.nf b/modules/pairix/main.nf index c00af657..4bfd3b0d 100644 --- a/modules/pairix/main.nf +++ b/modules/pairix/main.nf @@ -23,10 +23,9 @@ process PAIRIX { output: tuple val(meta), path(pair), path("*.px2"), emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ pairix \\ $options.args \\ diff --git a/modules/pairtools/dedup/main.nf b/modules/pairtools/dedup/main.nf index 5b901a77..eabf24dd 100644 --- a/modules/pairtools/dedup/main.nf +++ b/modules/pairtools/dedup/main.nf @@ -24,10 +24,9 @@ process PAIRTOOLS_DEDUP { output: tuple val(meta), path("*.pairs.gz") , emit: pairs tuple val(meta), path("*.pairs.stat"), emit: stat - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ pairtools dedup \\ diff --git a/modules/pairtools/flip/main.nf b/modules/pairtools/flip/main.nf index 3010b411..50cfdfd2 100644 --- a/modules/pairtools/flip/main.nf +++ b/modules/pairtools/flip/main.nf @@ -24,10 +24,9 @@ process PAIRTOOLS_FLIP { output: tuple val(meta), path("*.flip.gz"), emit: flip - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ pairtools \\ diff --git a/modules/pairtools/parse/main.nf b/modules/pairtools/parse/main.nf index 66c9257b..cd6099e1 100644 --- a/modules/pairtools/parse/main.nf +++ b/modules/pairtools/parse/main.nf @@ -25,10 +25,9 @@ process PAIRTOOLS_PARSE { output: tuple val(meta), path("*.pairsam.gz") , emit: pairsam tuple val(meta), path("*.pairsam.stat"), emit: stat - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ pairtools \\ diff --git a/modules/pairtools/restrict/main.nf b/modules/pairtools/restrict/main.nf index 31f463ad..b1b21da7 100644 --- a/modules/pairtools/restrict/main.nf +++ b/modules/pairtools/restrict/main.nf @@ -24,10 +24,9 @@ process PAIRTOOLS_RESTRICT { output: tuple val(meta), path("*.pairs.gz"), emit: restrict - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ pairtools \\ diff --git a/modules/pairtools/select/main.nf b/modules/pairtools/select/main.nf index c9218ea9..dec29573 100644 --- a/modules/pairtools/select/main.nf +++ b/modules/pairtools/select/main.nf @@ -24,10 +24,9 @@ process PAIRTOOLS_SELECT { output: tuple val(meta), path("*.selected.pairs.gz") , emit: selected tuple val(meta), path("*.unselected.pairs.gz"), emit: unselected - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ pairtools select \\ diff --git a/modules/pairtools/sort/main.nf b/modules/pairtools/sort/main.nf index 27caed7b..996bcb0b 100644 --- a/modules/pairtools/sort/main.nf +++ b/modules/pairtools/sort/main.nf @@ -23,10 +23,9 @@ process PAIRTOOLS_SORT { output: tuple val(meta), path("*.pairs.gz"), emit: sorted - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def mem = task.memory.toString().replaceAll(/(\s|\.|B)+/, '') """ diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index 5292d1c3..edf67dd7 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -23,10 +23,9 @@ process PANGOLIN { output: tuple val(meta), path('*.csv'), emit: report - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ pangolin \\ diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 5df852cf..49c47fda 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -29,11 +29,9 @@ process PBCCS { tuple val(meta), path("*.ccs_report.txt" ) , emit: ccs_report_txt tuple val(meta), path("*.ccs_report.json" ) , emit: ccs_report_json tuple val(meta), path("*.zmw_metrics.json.gz"), emit: zmw_metrics - tuple val(meta), path("versions.yml" ) , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) - // def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def ccs = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs.bam' def report_txt = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs_report.txt' def report_json = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs_report.json' diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index 166ed8be..b390bf7e 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -27,10 +27,9 @@ process PHANTOMPEAKQUALTOOLS { tuple val(meta), path("*.out") , emit: spp tuple val(meta), path("*.pdf") , emit: pdf tuple val(meta), path("*.Rdata"), emit: rdata - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ RUN_SPP=`which run_spp.R` diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index 11ddee9b..dd8fdaca 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -25,10 +25,9 @@ process PICARD_COLLECTMULTIPLEMETRICS { output: tuple val(meta), path("*_metrics"), emit: metrics tuple val(meta), path("*.pdf") , emit: pdf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index b5d11839..6028feef 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -24,10 +24,9 @@ process PICARD_COLLECTWGSMETRICS { output: tuple val(meta), path("*_metrics"), emit: metrics - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index c7e40d27..68cee34d 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -24,10 +24,9 @@ process PICARD_FILTERSAMREADS { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index dc8d460b..37b825d7 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -25,10 +25,9 @@ process PICARD_MARKDUPLICATES { tuple val(meta), path("*.bam") , emit: bam tuple val(meta), path("*.bai") , optional:true, emit: bai tuple val(meta), path("*.metrics.txt"), emit: metrics - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index c6ecfe58..355c0bf3 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -23,10 +23,9 @@ process PICARD_MERGESAMFILES { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def bam_files = bams.sort() def avail_mem = 3 diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index 475a30f9..939df1c0 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -25,10 +25,9 @@ process PICARD_SORTSAM { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { diff --git a/modules/plasmidid/main.nf b/modules/plasmidid/main.nf index 792b3c12..1edc5eeb 100644 --- a/modules/plasmidid/main.nf +++ b/modules/plasmidid/main.nf @@ -31,10 +31,9 @@ process PLASMIDID { tuple val(meta), path("${prefix}/database/") , emit: database tuple val(meta), path("${prefix}/fasta_files/") , emit: fasta_files tuple val(meta), path("${prefix}/kmer/") , emit: kmer - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ plasmidID \\ diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf index 735fef88..a676b723 100644 --- a/modules/plink/vcf/main.nf +++ b/modules/plink/vcf/main.nf @@ -26,10 +26,9 @@ process PLINK_VCF { tuple val(meta), path("*.bim"), emit: bim, optional: true tuple val(meta), path("*.fam"), emit: fam, optional: true - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ @@ -41,7 +40,7 @@ process PLINK_VCF { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - plink: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) + ${getSoftwareName(task.process)}: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) END_VERSIONS """ } diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index 69f682d3..f551a549 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -25,10 +25,9 @@ process PRESEQ_LCEXTRAP { output: tuple val(meta), path("*.ccurve.txt"), emit: ccurve tuple val(meta), path("*.log") , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def paired_end = meta.single_end ? '' : '-pe' """ diff --git a/modules/prodigal/main.nf b/modules/prodigal/main.nf index 6944f86b..572ffe92 100644 --- a/modules/prodigal/main.nf +++ b/modules/prodigal/main.nf @@ -27,10 +27,9 @@ process PRODIGAL { tuple val(meta), path("${prefix}.fna"), emit: nucleotide_fasta tuple val(meta), path("${prefix}.faa"), emit: amino_acid_fasta tuple val(meta), path("${prefix}_all.txt"), emit: all_gene_annotations - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ prodigal -i "${genome}" \\ diff --git a/modules/prokka/main.nf b/modules/prokka/main.nf index 8aefda7c..fb86078c 100644 --- a/modules/prokka/main.nf +++ b/modules/prokka/main.nf @@ -35,10 +35,9 @@ process PROKKA { tuple val(meta), path("${prefix}/*.log"), emit: log tuple val(meta), path("${prefix}/*.txt"), emit: txt tuple val(meta), path("${prefix}/*.tsv"), emit: tsv - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? 
"--prodigaltf ${prodigal_tf[0]}" : "" diff --git a/modules/pycoqc/main.nf b/modules/pycoqc/main.nf index f3b164ee..2c263d61 100644 --- a/modules/pycoqc/main.nf +++ b/modules/pycoqc/main.nf @@ -24,10 +24,9 @@ process PYCOQC { output: path "*.html" , emit: html path "*.json" , emit: json - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ pycoQC \\ $options.args \\ diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index df787e44..9cfb8a1a 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -23,10 +23,9 @@ process PYDAMAGE_ANALYZE { output: tuple val(meta), path("pydamage_results/pydamage_results.csv"), emit: csv - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ pydamage \\ diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index 87677367..6cd7ae7a 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -23,10 +23,9 @@ process PYDAMAGE_FILTER { output: tuple val(meta), path("pydamage_results/pydamage_filtered_results.csv"), emit: csv - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ diff --git a/modules/qcat/main.nf b/modules/qcat/main.nf index be239816..b650fb8c 100644 --- a/modules/qcat/main.nf +++ b/modules/qcat/main.nf @@ -24,10 +24,9 @@ process QCAT { output: tuple val(meta), path("fastq/*.fastq.gz"), emit: reads - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ ## Unzip fastq file diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index 0cc101ef..d33f1e67 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -25,10 +25,9 @@ process QUALIMAP_BAMQC { output: tuple val(meta), path("${prefix}"), emit: results - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def collect_pairs = meta.single_end ? '' : '--collect-overlap-pairs' diff --git a/modules/qualimap/rnaseq/main.nf b/modules/qualimap/rnaseq/main.nf index fa32a6a7..9492cec6 100644 --- a/modules/qualimap/rnaseq/main.nf +++ b/modules/qualimap/rnaseq/main.nf @@ -24,10 +24,9 @@ process QUALIMAP_RNASEQ { output: tuple val(meta), path("${prefix}"), emit: results - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def paired_end = meta.single_end ? 
'' : '-pe' def memory = task.memory.toGiga() + "G" diff --git a/modules/quast/main.nf b/modules/quast/main.nf index 97ff93e2..072d649d 100644 --- a/modules/quast/main.nf +++ b/modules/quast/main.nf @@ -27,10 +27,9 @@ process QUAST { output: path "${prefix}" , emit: results path '*.tsv' , emit: tsv - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ?: software def features = use_gff ? "--features $gff" : '' def reference = use_fasta ? "-r $fasta" : '' diff --git a/modules/rapidnj/main.nf b/modules/rapidnj/main.nf index 118ea7af..aa23b56e 100644 --- a/modules/rapidnj/main.nf +++ b/modules/rapidnj/main.nf @@ -25,10 +25,9 @@ process RAPIDNJ { output: path "*.sth" , emit: stockholm_alignment path "*.tre" , emit: phylogeny - path "versions.yml", emit: version + path "versions.yml", emit: versions script: - def software = getSoftwareName(task.process) """ python \\ -c 'from Bio import SeqIO; SeqIO.convert("$alignment", "fasta", "alignment.sth", "stockholm")' diff --git a/modules/rasusa/main.nf b/modules/rasusa/main.nf index 88f3a208..b9ba0b13 100644 --- a/modules/rasusa/main.nf +++ b/modules/rasusa/main.nf @@ -24,10 +24,9 @@ process RASUSA { output: tuple val(meta), path('*.fastq.gz'), emit: reads - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def output = meta.single_end ? "--output ${prefix}.fastq.gz" : "--output ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz" """ diff --git a/modules/raxmlng/main.nf b/modules/raxmlng/main.nf index e3bde2f3..f607b506 100644 --- a/modules/raxmlng/main.nf +++ b/modules/raxmlng/main.nf @@ -23,10 +23,9 @@ process RAXMLNG { output: path "*.raxml.bestTree", emit: phylogeny path "*.raxml.support" , optional:true, emit: phylogeny_bootstrapped - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ raxml-ng \\ $options.args \\ diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index d3d11397..f19392f7 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -27,14 +27,13 @@ process RSEM_CALCULATEEXPRESSION { tuple val(meta), path("*.isoforms.results"), emit: counts_transcript tuple val(meta), path("*.stat") , emit: stat tuple val(meta), path("*.log") , emit: logs - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path("*.STAR.genome.bam") , optional:true, emit: bam_star tuple val(meta), path("${prefix}.genome.bam") , optional:true, emit: bam_genome tuple val(meta), path("${prefix}.transcript.bam"), optional:true, emit: bam_transcript script: - def software = getSoftwareName(task.process) prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" def strandedness = '' diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index b4a613bd..7e671207 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -25,10 +25,9 @@ process RSEM_PREPAREREFERENCE { output: path "rsem" , emit: index path "rsem/*transcripts.fa", emit: transcript_fasta - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def args = options.args.tokenize() if (args.contains('--star')) { args.removeIf { it.contains('--star') } diff --git a/modules/rseqc/bamstat/main.nf b/modules/rseqc/bamstat/main.nf index fa71dd11..64939add 100644 --- a/modules/rseqc/bamstat/main.nf +++ b/modules/rseqc/bamstat/main.nf @@ -23,10 +23,9 @@ process RSEQC_BAMSTAT { output: tuple val(meta), path("*.bam_stat.txt"), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bam_stat.py \\ diff --git a/modules/rseqc/inferexperiment/main.nf b/modules/rseqc/inferexperiment/main.nf index a9842c0d..c5e94943 100644 --- a/modules/rseqc/inferexperiment/main.nf +++ b/modules/rseqc/inferexperiment/main.nf @@ -24,10 +24,9 @@ process RSEQC_INFEREXPERIMENT { output: tuple val(meta), path("*.infer_experiment.txt"), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ infer_experiment.py \\ diff --git a/modules/rseqc/innerdistance/main.nf b/modules/rseqc/innerdistance/main.nf index d98780f1..622cd5cd 100644 --- a/modules/rseqc/innerdistance/main.nf +++ b/modules/rseqc/innerdistance/main.nf @@ -28,10 +28,9 @@ process RSEQC_INNERDISTANCE { tuple val(meta), path("*mean.txt") , optional:true, emit: mean tuple val(meta), path("*.pdf") , optional:true, emit: pdf tuple val(meta), path("*.r") , optional:true, emit: rscript - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (!meta.single_end) { """ diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index cfb12d69..1b75d915 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -30,10 +30,9 @@ process RSEQC_JUNCTIONANNOTATION { tuple val(meta), path("*.Interact.bed"), optional:true, emit: interact_bed tuple val(meta), path("*junction.pdf") , optional:true, emit: pdf tuple val(meta), path("*events.pdf") , optional:true, emit: events_pdf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ junction_annotation.py \\ diff --git a/modules/rseqc/junctionsaturation/main.nf b/modules/rseqc/junctionsaturation/main.nf index a5aa5461..fa435aea 100644 --- a/modules/rseqc/junctionsaturation/main.nf +++ b/modules/rseqc/junctionsaturation/main.nf @@ -25,10 +25,9 @@ process RSEQC_JUNCTIONSATURATION { output: tuple val(meta), path("*.pdf"), emit: pdf tuple val(meta), path("*.r") , emit: rscript - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ junction_saturation.py \\ diff --git a/modules/rseqc/readdistribution/main.nf b/modules/rseqc/readdistribution/main.nf index 56086c89..0c83fdf0 100644 --- a/modules/rseqc/readdistribution/main.nf +++ b/modules/rseqc/readdistribution/main.nf @@ -24,10 +24,9 @@ process RSEQC_READDISTRIBUTION { output: tuple val(meta), path("*.read_distribution.txt"), emit: txt - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ read_distribution.py \\ diff --git a/modules/rseqc/readduplication/main.nf b/modules/rseqc/readduplication/main.nf index ca7c2b13..bee82682 100644 --- a/modules/rseqc/readduplication/main.nf +++ b/modules/rseqc/readduplication/main.nf @@ -26,10 +26,9 @@ process RSEQC_READDUPLICATION { tuple val(meta), path("*pos.DupRate.xls"), emit: pos_xls tuple val(meta), path("*.pdf") , emit: pdf tuple val(meta), path("*.r") , emit: rscript - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ read_duplication.py \\ diff --git a/modules/salmon/index/main.nf b/modules/salmon/index/main.nf index 9e62eb8a..c3fcef01 100644 --- a/modules/salmon/index/main.nf +++ b/modules/salmon/index/main.nf @@ -24,10 +24,9 @@ process SALMON_INDEX { output: path "salmon" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def get_decoy_ids = "grep '^>' $genome_fasta | cut -d ' ' -f 1 > decoys.txt" def gentrome = "gentrome.fa" if (genome_fasta.endsWith('.gz')) { diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 397bdd31..7c2e0e17 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -28,10 +28,9 @@ process SALMON_QUANT { output: tuple val(meta), path("${prefix}"), emit: results - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def reference = "--index $index" diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index cccf2f7c..3da1d6fe 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -28,10 +28,9 @@ process SAMTOOLS_AMPLICONCLIP { tuple val(meta), path("*.bam") , emit: bam tuple val(meta), path("*.clipstats.txt") , optional:true, emit: stats tuple val(meta), path("*.cliprejects.bam"), optional:true, emit: rejects_bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" def rejects = save_cliprejects ? "--rejects-file ${prefix}.cliprejects.bam" : "" def stats = save_clipstats ? "-f ${prefix}.clipstats.txt" : "" diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index cdbae99b..80cedeab 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -23,10 +23,9 @@ process SAMTOOLS_FAIDX { output: path "*.fai" , emit: fai - path "versions.yml", emit: version + path "versions.yml", emit: versions script: - def software = getSoftwareName(task.process) """ samtools faidx $fasta cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 02110870..0b454360 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -23,10 +23,9 @@ process SAMTOOLS_FASTQ { output: tuple val(meta), path("*.fastq.gz"), emit: fastq - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def endedness = meta.single_end ? "-0 ${prefix}.fastq.gz" : "-1 ${prefix}_1.fastq.gz -2 ${prefix}_2.fastq.gz" diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index d0cf86aa..f9115c6b 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -23,10 +23,9 @@ process SAMTOOLS_FLAGSTAT { output: tuple val(meta), path("*.flagstat"), emit: flagstat - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ samtools flagstat $bam > ${bam}.flagstat cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index 06a07964..b005088a 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -23,10 +23,9 @@ process SAMTOOLS_IDXSTATS { output: tuple val(meta), path("*.idxstats"), emit: idxstats - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ samtools idxstats $bam > ${bam}.idxstats cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index c2ba4de7..febbc11c 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -24,10 +24,9 @@ process SAMTOOLS_INDEX { output: tuple val(meta), path("*.bai"), optional:true, emit: bai tuple val(meta), path("*.csi"), optional:true, emit: csi - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ samtools index $options.args $bam cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index ec574105..34c40d57 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -23,10 +23,9 @@ process SAMTOOLS_MERGE { output: tuple val(meta), path("${prefix}.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ samtools merge ${prefix}.bam $bams diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 903bfd33..9e120526 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -24,10 +24,9 @@ process SAMTOOLS_MPILEUP { output: tuple val(meta), path("*.mpileup"), emit: mpileup - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ samtools mpileup \\ diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index edd558bf..b30f6f45 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -23,10 +23,9 @@ process SAMTOOLS_SORT { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ samtools sort $options.args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index 823b5f31..6218dd2d 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -23,10 +23,9 @@ process SAMTOOLS_STATS { output: tuple val(meta), path("*.stats"), emit: stats - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ samtools stats $bam > ${bam}.stats cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index 110d5abf..ec1663e0 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -23,10 +23,9 @@ process SAMTOOLS_VIEW { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ samtools view $options.args $bam > ${prefix}.bam diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index 8892ab6d..4c3fd922 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -25,10 +25,9 @@ process SEACR_CALLPEAK { output: tuple val(meta), path("*.bed"), emit: bed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ SEACR_1.3.sh \\ diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index b178b1da..80f55bb6 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -23,10 +23,9 @@ process SEQKIT_SPLIT2 { output: tuple val(meta), path("*${prefix}/*.gz"), emit: reads - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" if(meta.single_end){ """ diff --git a/modules/seqtk/sample/main.nf b/modules/seqtk/sample/main.nf index 277d74ca..3b039fb9 100644 --- a/modules/seqtk/sample/main.nf +++ b/modules/seqtk/sample/main.nf @@ -24,10 +24,9 @@ process SEQTK_SAMPLE { output: tuple val(meta), path("*.fastq.gz"), emit: reads - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { """ diff --git a/modules/seqtk/subseq/main.nf b/modules/seqtk/subseq/main.nf index 41326402..df8783de 100644 --- a/modules/seqtk/subseq/main.nf +++ b/modules/seqtk/subseq/main.nf @@ -24,10 +24,9 @@ process SEQTK_SUBSEQ { output: path "*.gz" , emit: sequences - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ?: '' def ext = "fa" if ("$sequences" ==~ /.+\.fq|.+\.fq.gz|.+\.fastq|.+\.fastq.gz/) { diff --git a/modules/sequenzautils/bam2seqz/main.nf b/modules/sequenzautils/bam2seqz/main.nf index 9c4fc12f..61ca70c6 100644 --- a/modules/sequenzautils/bam2seqz/main.nf +++ b/modules/sequenzautils/bam2seqz/main.nf @@ -25,10 +25,9 @@ process SEQUENZAUTILS_BAM2SEQZ { output: tuple val(meta), path("*.gz"), emit: seqz - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ sequenza-utils \\ diff --git a/modules/sequenzautils/gcwiggle/main.nf b/modules/sequenzautils/gcwiggle/main.nf index a352256a..c952bb70 100644 --- a/modules/sequenzautils/gcwiggle/main.nf +++ b/modules/sequenzautils/gcwiggle/main.nf @@ -23,10 +23,9 @@ process SEQUENZAUTILS_GCWIGGLE { output: tuple val(meta), path("*.wig.gz"), emit: wig - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ sequenza-utils \\ diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index e9b2836b..aaabce51 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -25,11 +25,10 @@ process SEQWISH_INDUCE { output: tuple val(meta), path("*.gfa"), emit: gfa - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ seqwish \\ diff --git a/modules/shovill/main.nf b/modules/shovill/main.nf index 92b10732..48425f9f 100644 --- a/modules/shovill/main.nf +++ b/modules/shovill/main.nf @@ -27,10 +27,9 @@ process SHOVILL { tuple val(meta), path("shovill.log") , emit: log tuple val(meta), path("{skesa,spades,megahit,velvet}.fasta"), emit: raw_contigs tuple val(meta), path("contigs.{fastg,gfa,LastGraph}") , optional:true, emit: gfa - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def memory = task.memory.toGiga() """ shovill \\ diff --git a/modules/snpdists/main.nf b/modules/snpdists/main.nf index ede94906..506a922a 100644 --- a/modules/snpdists/main.nf +++ b/modules/snpdists/main.nf @@ -23,10 +23,9 @@ process SNPDISTS { output: tuple val(meta), path("*.tsv"), emit: tsv - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ snp-dists \\ diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index 8b30360a..3a1f6a52 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -31,10 +31,9 @@ process SNPEFF { output: tuple val(meta), path("*.ann.vcf"), emit: vcf path "*.csv" , emit: report - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def avail_mem = 6 if (!task.memory) { log.info '[snpEff] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' diff --git a/modules/snpsites/main.nf b/modules/snpsites/main.nf index 5cc85773..543ee01c 100644 --- a/modules/snpsites/main.nf +++ b/modules/snpsites/main.nf @@ -23,11 +23,10 @@ process SNPSITES { output: path "*.fas" , emit: fasta path "*.sites.txt" , emit: constant_sites - path "versions.yml" , emit: version + path "versions.yml" , emit: versions env CONSTANT_SITES, emit: constant_sites_string script: - def software = getSoftwareName(task.process) """ snp-sites \\ $alignment \\ diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index 01975979..f35b1468 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -25,10 +25,9 @@ process SORTMERNA { output: tuple val(meta), path("*.fastq.gz"), emit: reads tuple val(meta), path("*.log") , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def Refs = "" diff --git a/modules/spades/main.nf b/modules/spades/main.nf index a260de54..c21066e2 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -29,10 +29,9 @@ process SPADES { tuple val(meta), path('*.gene_clusters.fa'), optional:true, emit: gene_clusters tuple val(meta), path('*.assembly.gfa') , optional:true, emit: gfa tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def input_reads = meta.single_end ? "-s $reads" : "-1 ${reads[0]} -2 ${reads[1]}" def custom_hmms = params.spades_hmm ? 
"--custom-hmms $hmm" : "" diff --git a/modules/staphopiasccmec/main.nf b/modules/staphopiasccmec/main.nf index 0e57128b..08def401 100644 --- a/modules/staphopiasccmec/main.nf +++ b/modules/staphopiasccmec/main.nf @@ -23,10 +23,9 @@ process STAPHOPIASCCMEC { output: tuple val(meta), path("*.tsv"), emit: tsv - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ staphopia-sccmec --assembly $fasta $options.args > ${prefix}.tsv diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index 677d1f2a..e0ccba8c 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -29,7 +29,7 @@ process STAR_ALIGN { tuple val(meta), path('*Log.final.out') , emit: log_final tuple val(meta), path('*Log.out') , emit: log_out tuple val(meta), path('*Log.progress.out'), emit: log_progress - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path('*sortedByCoord.out.bam') , optional:true, emit: bam_sorted tuple val(meta), path('*toTranscriptome.out.bam'), optional:true, emit: bam_transcript @@ -39,7 +39,6 @@ process STAR_ALIGN { tuple val(meta), path('*.out.junction') , optional:true, emit: junction script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def ignore_gtf = params.star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" def seq_platform = params.seq_platform ? "'PL:$params.seq_platform'" : "" diff --git a/modules/star/genomegenerate/main.nf b/modules/star/genomegenerate/main.nf index 520f6b21..c932fafe 100644 --- a/modules/star/genomegenerate/main.nf +++ b/modules/star/genomegenerate/main.nf @@ -25,10 +25,9 @@ process STAR_GENOMEGENERATE { output: path "star" , emit: index - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def memory = task.memory ? "--limitGenomeGenerateRAM ${task.memory.toBytes() - 100000000}" : '' def args = options.args.tokenize() if (args.contains('--genomeSAindexNbases')) { diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index d2203fa4..64a01e6c 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -29,10 +29,9 @@ process STRELKA_GERMLINE { tuple val(meta), path("*variants.vcf.gz.tbi"), emit: vcf_tbi tuple val(meta), path("*genome.vcf.gz") , emit: genome_vcf tuple val(meta), path("*genome.vcf.gz.tbi") , emit: genome_vcf_tbi - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def regions = params.target_bed ? 
"--exome --callRegions ${target_bed}" : "" """ @@ -51,7 +50,7 @@ process STRELKA_GERMLINE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - strelka: \$( configureStrelkaGermlineWorkflow.py --version ) + ${getSoftwareName(task.process)}: \$( configureStrelkaGermlineWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/stringtie/merge/main.nf b/modules/stringtie/merge/main.nf index 85670a91..371533bb 100644 --- a/modules/stringtie/merge/main.nf +++ b/modules/stringtie/merge/main.nf @@ -24,10 +24,9 @@ process STRINGTIE_MERGE { output: path "stringtie.merged.gtf", emit: gtf - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ stringtie \\ --merge $stringtie_gtf \\ diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index 92986dba..3579e47c 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -27,10 +27,9 @@ process STRINGTIE { tuple val(meta), path("*.transcripts.gtf"), emit: transcript_gtf tuple val(meta), path("*.abundance.txt") , emit: abundance tuple val(meta), path("*.ballgown") , emit: ballgown - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def strandedness = '' diff --git a/modules/subread/featurecounts/main.nf b/modules/subread/featurecounts/main.nf index 76209a0d..0a0285db 100644 --- a/modules/subread/featurecounts/main.nf +++ b/modules/subread/featurecounts/main.nf @@ -24,10 +24,9 @@ process SUBREAD_FEATURECOUNTS { output: tuple val(meta), path("*featureCounts.txt") , emit: counts tuple val(meta), path("*featureCounts.txt.summary"), emit: summary - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def paired_end = meta.single_end ? '' : '-p' diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index c76588df..43726f17 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -23,10 +23,9 @@ process TABIX_BGZIP { output: tuple val(meta), path("*.gz"), emit: gz - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bgzip -c $options.args $input > ${prefix}.${input.getExtension()}.gz diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 302c8500..e44a7226 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -23,10 +23,9 @@ process TABIX_BGZIPTABIX { output: tuple val(meta), path("*.gz"), path("*.tbi"), emit: tbi - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ bgzip -c $options.args $input > ${prefix}.gz diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index 1fabeba4..1574c0b5 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -23,10 +23,9 @@ process TABIX_TABIX { output: tuple val(meta), path("*.tbi"), emit: tbi - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ tabix $options.args $tab diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index fce5c49b..e262221a 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -27,10 +27,9 @@ process TIDDIT_SV { tuple val(meta), path("*.vcf") , emit: vcf tuple val(meta), path("*.ploidy.tab") , emit: ploidy tuple val(meta), path("*.signals.tab"), emit: signals - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def reference = fasta == "dummy_file.txt" ? "--ref $fasta" : "" """ diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 6f5a65c2..8e77f1f7 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -24,7 +24,7 @@ process TRIMGALORE { output: tuple val(meta), path("*.fq.gz") , emit: reads tuple val(meta), path("*report.txt"), emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions tuple val(meta), path("*.html"), emit: html optional true tuple val(meta), path("*.zip") , emit: zip optional true @@ -48,7 +48,6 @@ process TRIMGALORE { def tpc_r2 = params.three_prime_clip_r2 > 0 ? "--three_prime_clip_r2 ${params.three_prime_clip_r2}" : '' // Added soft-links to original fastqs for consistent naming in MultiQC - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { """ diff --git a/modules/ucsc/bed12tobigbed/main.nf b/modules/ucsc/bed12tobigbed/main.nf index 2f9b287b..81f39a6f 100644 --- a/modules/ucsc/bed12tobigbed/main.nf +++ b/modules/ucsc/bed12tobigbed/main.nf @@ -26,10 +26,9 @@ process UCSC_BED12TOBIGBED { output: tuple val(meta), path("*.bigBed"), emit: bigbed - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bedToBigBed \\ diff --git a/modules/ucsc/bedclip/main.nf b/modules/ucsc/bedclip/main.nf index c001b410..5fbc2b3b 100755 --- a/modules/ucsc/bedclip/main.nf +++ b/modules/ucsc/bedclip/main.nf @@ -26,10 +26,9 @@ process UCSC_BEDCLIP { output: tuple val(meta), path("*.bedGraph"), emit: bedgraph - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ bedClip \\ diff --git a/modules/ucsc/bedgraphtobigwig/main.nf b/modules/ucsc/bedgraphtobigwig/main.nf index 4a779644..f55cdb07 100644 --- a/modules/ucsc/bedgraphtobigwig/main.nf +++ b/modules/ucsc/bedgraphtobigwig/main.nf @@ -26,10 +26,9 @@ process UCSC_BEDGRAPHTOBIGWIG { output: tuple val(meta), path("*.bigWig"), emit: bigwig - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ bedGraphToBigWig \\ diff --git a/modules/ucsc/bigwigaverageoverbed/main.nf b/modules/ucsc/bigwigaverageoverbed/main.nf index 76c00cfe..72491443 100644 --- a/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/modules/ucsc/bigwigaverageoverbed/main.nf @@ -26,10 +26,9 @@ process UCSC_BIGWIGAVERAGEOVERBED { output: tuple val(meta), path("*.tab"), emit: tab - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ # there is a bug that bigWigAverageOverBed can not handle ensembl seqlevels style. diff --git a/modules/ucsc/wigtobigwig/main.nf b/modules/ucsc/wigtobigwig/main.nf index 29e5cd99..d03a2c4a 100644 --- a/modules/ucsc/wigtobigwig/main.nf +++ b/modules/ucsc/wigtobigwig/main.nf @@ -26,10 +26,9 @@ process UCSC_WIGTOBIGWIG { output: path "*.bw" , emit: bw - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ wigToBigWig \\ diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index 0f15c86c..0ec9741b 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -23,10 +23,9 @@ process UMITOOLS_DEDUP { output: tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def paired = meta.single_end ? "" : "--paired" """ diff --git a/modules/umitools/extract/main.nf b/modules/umitools/extract/main.nf index 0a5e6636..d90a3ba8 100644 --- a/modules/umitools/extract/main.nf +++ b/modules/umitools/extract/main.nf @@ -24,10 +24,9 @@ process UMITOOLS_EXTRACT { output: tuple val(meta), path("*.fastq.gz"), emit: reads tuple val(meta), path("*.log") , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { """ diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 1dd97c40..2f7c49d6 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -25,10 +25,9 @@ process UNICYCLER { tuple val(meta), path('*.scaffolds.fa'), emit: scaffolds tuple val(meta), path('*.assembly.gfa'), emit: gfa tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def input_reads = meta.single_end ? 
"-s $reads" : "-1 ${reads[0]} -2 ${reads[1]}" """ diff --git a/modules/untar/main.nf b/modules/untar/main.nf index 0866dd55..efb9d825 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -23,10 +23,9 @@ process UNTAR { output: path "$untar" , emit: untar - path "versions.yml", emit: version + path "versions.yml", emit: versions script: - def software = getSoftwareName(task.process) untar = archive.toString() - '.tar.gz' """ tar \\ diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf index 9e64bb1b..6530bd40 100644 --- a/modules/unzip/main.nf +++ b/modules/unzip/main.nf @@ -24,10 +24,9 @@ process UNZIP { output: path "${archive.baseName}/" , emit: unzipped_archive - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) if ( archive instanceof List && archive.name.size > 1 ) { exit 1, "[UNZIP] error: 7za only accepts a single archive as input. Please check module input." } diff --git a/modules/variantbam/main.nf b/modules/variantbam/main.nf index c4ac3742..e73b8bf1 100644 --- a/modules/variantbam/main.nf +++ b/modules/variantbam/main.nf @@ -25,10 +25,9 @@ process VARIANTBAM { output: tuple val(meta), path("*.bam") , emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" """ variant \\ diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index a8d8969c..32607958 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -24,11 +24,11 @@ process VCFTOOLS { // Other optional input files can be utilised in a similar way to below but we do not exhaustively itterate through all // possible options. Instead we leave that to the user. tuple val(meta), path(variant_file) - path(bed) - path(diff_variant_file) + path bed) + path diff_variant_file output: - path("versions.yml") , emit: version + path "versions.yml", emit: versions tuple val(meta), path("*.vcf"), optional:true, emit: vcf tuple val(meta), path("*.bcf"), optional:true, emit: bcf @@ -94,7 +94,6 @@ process VCFTOOLS { tuple val(meta), path("*.diff.switch"), optional:true, emit: diff_switch_error script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def args = options.args.tokenize() diff --git a/modules/yara/index/main.nf b/modules/yara/index/main.nf index e99d99ba..51ae8a32 100644 --- a/modules/yara/index/main.nf +++ b/modules/yara/index/main.nf @@ -23,10 +23,9 @@ process YARA_INDEX { output: path "yara" , emit: index - path "versions.yml", emit: version + path "versions.yml", emit: versions script: - def software = getSoftwareName(task.process) """ mkdir yara diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 88e3d411..3d69674c 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -24,10 +24,9 @@ process YARA_MAPPER { output: tuple val(meta), path("*.mapped.bam"), emit: bam - path "versions.yml" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { """ From cf15ece503fd69d5fe630f96841ff7bafd47fcb4 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 1 Oct 2021 14:54:50 +0100 Subject: [PATCH 113/314] Address Jose's comments in #780 (#781) * Remove def software line * Replace version with versions in emit statement * Fix default software names * Address Jose's comments in #780 --- modules/unzip/main.nf | 5 +- modules/vcftools/main.nf | 135 +++++++++++++++++++-------------------- 2 files changed, 69 insertions(+), 71 deletions(-) diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf index 6530bd40..f39e75e8 100644 --- a/modules/unzip/main.nf +++ b/modules/unzip/main.nf @@ -23,13 +23,12 @@ process UNZIP { path archive output: - path "${archive.baseName}/" , emit: unzipped_archive - path "versions.yml" , emit: versions + path "${archive.baseName}/", emit: unzipped_archive + path "versions.yml" , emit: versions script: if ( archive instanceof List && archive.name.size > 1 ) { exit 1, "[UNZIP] error: 7za only accepts a single archive as input. Please check module input." } - """ 7za \\ e \\ diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index 32607958..768d5a23 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -24,78 +24,77 @@ process VCFTOOLS { // Other optional input files can be utilised in a similar way to below but we do not exhaustively itterate through all // possible options. Instead we leave that to the user. tuple val(meta), path(variant_file) - path bed) - path diff_variant_file + path bed + path diff_variant_file output: - path "versions.yml", emit: versions - - tuple val(meta), path("*.vcf"), optional:true, emit: vcf - tuple val(meta), path("*.bcf"), optional:true, emit: bcf - tuple val(meta), path("*.frq"), optional:true, emit: frq - tuple val(meta), path("*.frq.count"), optional:true, emit: frq_count - tuple val(meta), path("*.idepth"), optional:true, emit: idepth - tuple val(meta), path("*.ldepth"), optional:true, emit: ldepth - tuple val(meta), path("*.ldepth.mean"), optional:true, emit: ldepth_mean - tuple val(meta), path("*.gdepth"), optional:true, emit: gdepth - tuple val(meta), path("*.hap.ld"), optional:true, emit: hap_ld - tuple val(meta), path("*.geno.ld"), optional:true, emit: geno_ld - tuple val(meta), path("*.geno.chisq"), optional:true, emit: geno_chisq - tuple val(meta), path("*.list.hap.ld"), optional:true, emit: list_hap_ld - tuple val(meta), path("*.list.geno.ld"), optional:true, emit: list_geno_ld - tuple val(meta), path("*.interchrom.hap.ld"), optional:true, emit: interchrom_hap_ld - tuple val(meta), path("*.interchrom.geno.ld"), optional:true, emit: interchrom_geno_ld - tuple val(meta), path("*.TsTv"), optional:true, emit: tstv - tuple val(meta), path("*.TsTv.summary"), optional:true, emit: tstv_summary - tuple val(meta), path("*.TsTv.count"), optional:true, emit: tstv_count - tuple val(meta), path("*.TsTv.qual"), optional:true, emit: tstv_qual - tuple val(meta), path("*.FILTER.summary"), optional:true, emit: filter_summary - tuple val(meta), path("*.sites.pi"), optional:true, emit: sites_pi - tuple val(meta), path("*.windowed.pi"), optional:true, emit: windowed_pi - tuple val(meta), path("*.weir.fst"), optional:true, emit: weir_fst - tuple val(meta), path("*.het"), optional:true, emit: heterozygosity - tuple val(meta), path("*.hwe"), optional:true, emit: hwe - tuple val(meta), path("*.Tajima.D"), optional:true, emit: tajima_d - tuple val(meta), path("*.ifreqburden"), optional:true, emit: 
freq_burden - tuple val(meta), path("*.LROH"), optional:true, emit: lroh - tuple val(meta), path("*.relatedness"), optional:true, emit: relatedness - tuple val(meta), path("*.relatedness2"), optional:true, emit: relatedness2 - tuple val(meta), path("*.lqual"), optional:true, emit: lqual - tuple val(meta), path("*.imiss"), optional:true, emit: missing_individual - tuple val(meta), path("*.lmiss"), optional:true, emit: missing_site - tuple val(meta), path("*.snpden"), optional:true, emit: snp_density - tuple val(meta), path("*.kept.sites"), optional:true, emit: kept_sites - tuple val(meta), path("*.removed.sites"), optional:true, emit: removed_sites - tuple val(meta), path("*.singletons"), optional:true, emit: singeltons - tuple val(meta), path("*.indel.hist"), optional:true, emit: indel_hist - tuple val(meta), path("*.hapcount"), optional:true, emit: hapcount - tuple val(meta), path("*.mendel"), optional:true, emit: mendel - tuple val(meta), path("*.FORMAT"), optional:true, emit: format - tuple val(meta), path("*.INFO"), optional:true, emit: info - tuple val(meta), path("*.012"), optional:true, emit: genotypes_matrix - tuple val(meta), path("*.012.indv"), optional:true, emit: genotypes_matrix_individual - tuple val(meta), path("*.012.pos"), optional:true, emit: genotypes_matrix_position - tuple val(meta), path("*.impute.hap"), optional:true, emit: impute_hap - tuple val(meta), path("*.impute.hap.legend"), optional:true, emit: impute_hap_legend - tuple val(meta), path("*.impute.hap.indv"), optional:true, emit: impute_hap_indv - tuple val(meta), path("*.ldhat.sites"), optional:true, emit: ldhat_sites - tuple val(meta), path("*.ldhat.locs"), optional:true, emit: ldhat_locs - tuple val(meta), path("*.BEAGLE.GL"), optional:true, emit: beagle_gl - tuple val(meta), path("*.BEAGLE.PL"), optional:true, emit: beagle_pl - tuple val(meta), path("*.ped"), optional:true, emit: ped - tuple val(meta), path("*.map"), optional:true, emit: map_ - tuple val(meta), path("*.tped"), optional:true, emit: tped - tuple val(meta), path("*.tfam"), optional:true, emit: tfam - tuple val(meta), path("*.diff.sites_in_files"), optional:true, emit: diff_sites_in_files - tuple val(meta), path("*.diff.indv_in_files"), optional:true, emit: diff_indv_in_files - tuple val(meta), path("*.diff.sites"), optional:true, emit: diff_sites - tuple val(meta), path("*.diff.indv"), optional:true, emit: diff_indv + tuple val(meta), path("*.vcf") , optional:true, emit: vcf + tuple val(meta), path("*.bcf") , optional:true, emit: bcf + tuple val(meta), path("*.frq") , optional:true, emit: frq + tuple val(meta), path("*.frq.count") , optional:true, emit: frq_count + tuple val(meta), path("*.idepth") , optional:true, emit: idepth + tuple val(meta), path("*.ldepth") , optional:true, emit: ldepth + tuple val(meta), path("*.ldepth.mean") , optional:true, emit: ldepth_mean + tuple val(meta), path("*.gdepth") , optional:true, emit: gdepth + tuple val(meta), path("*.hap.ld") , optional:true, emit: hap_ld + tuple val(meta), path("*.geno.ld") , optional:true, emit: geno_ld + tuple val(meta), path("*.geno.chisq") , optional:true, emit: geno_chisq + tuple val(meta), path("*.list.hap.ld") , optional:true, emit: list_hap_ld + tuple val(meta), path("*.list.geno.ld") , optional:true, emit: list_geno_ld + tuple val(meta), path("*.interchrom.hap.ld") , optional:true, emit: interchrom_hap_ld + tuple val(meta), path("*.interchrom.geno.ld") , optional:true, emit: interchrom_geno_ld + tuple val(meta), path("*.TsTv") , optional:true, emit: tstv + tuple val(meta), 
path("*.TsTv.summary") , optional:true, emit: tstv_summary + tuple val(meta), path("*.TsTv.count") , optional:true, emit: tstv_count + tuple val(meta), path("*.TsTv.qual") , optional:true, emit: tstv_qual + tuple val(meta), path("*.FILTER.summary") , optional:true, emit: filter_summary + tuple val(meta), path("*.sites.pi") , optional:true, emit: sites_pi + tuple val(meta), path("*.windowed.pi") , optional:true, emit: windowed_pi + tuple val(meta), path("*.weir.fst") , optional:true, emit: weir_fst + tuple val(meta), path("*.het") , optional:true, emit: heterozygosity + tuple val(meta), path("*.hwe") , optional:true, emit: hwe + tuple val(meta), path("*.Tajima.D") , optional:true, emit: tajima_d + tuple val(meta), path("*.ifreqburden") , optional:true, emit: freq_burden + tuple val(meta), path("*.LROH") , optional:true, emit: lroh + tuple val(meta), path("*.relatedness") , optional:true, emit: relatedness + tuple val(meta), path("*.relatedness2") , optional:true, emit: relatedness2 + tuple val(meta), path("*.lqual") , optional:true, emit: lqual + tuple val(meta), path("*.imiss") , optional:true, emit: missing_individual + tuple val(meta), path("*.lmiss") , optional:true, emit: missing_site + tuple val(meta), path("*.snpden") , optional:true, emit: snp_density + tuple val(meta), path("*.kept.sites") , optional:true, emit: kept_sites + tuple val(meta), path("*.removed.sites") , optional:true, emit: removed_sites + tuple val(meta), path("*.singletons") , optional:true, emit: singeltons + tuple val(meta), path("*.indel.hist") , optional:true, emit: indel_hist + tuple val(meta), path("*.hapcount") , optional:true, emit: hapcount + tuple val(meta), path("*.mendel") , optional:true, emit: mendel + tuple val(meta), path("*.FORMAT") , optional:true, emit: format + tuple val(meta), path("*.INFO") , optional:true, emit: info + tuple val(meta), path("*.012") , optional:true, emit: genotypes_matrix + tuple val(meta), path("*.012.indv") , optional:true, emit: genotypes_matrix_individual + tuple val(meta), path("*.012.pos") , optional:true, emit: genotypes_matrix_position + tuple val(meta), path("*.impute.hap") , optional:true, emit: impute_hap + tuple val(meta), path("*.impute.hap.legend") , optional:true, emit: impute_hap_legend + tuple val(meta), path("*.impute.hap.indv") , optional:true, emit: impute_hap_indv + tuple val(meta), path("*.ldhat.sites") , optional:true, emit: ldhat_sites + tuple val(meta), path("*.ldhat.locs") , optional:true, emit: ldhat_locs + tuple val(meta), path("*.BEAGLE.GL") , optional:true, emit: beagle_gl + tuple val(meta), path("*.BEAGLE.PL") , optional:true, emit: beagle_pl + tuple val(meta), path("*.ped") , optional:true, emit: ped + tuple val(meta), path("*.map") , optional:true, emit: map_ + tuple val(meta), path("*.tped") , optional:true, emit: tped + tuple val(meta), path("*.tfam") , optional:true, emit: tfam + tuple val(meta), path("*.diff.sites_in_files") , optional:true, emit: diff_sites_in_files + tuple val(meta), path("*.diff.indv_in_files") , optional:true, emit: diff_indv_in_files + tuple val(meta), path("*.diff.sites") , optional:true, emit: diff_sites + tuple val(meta), path("*.diff.indv") , optional:true, emit: diff_indv tuple val(meta), path("*.diff.discordance.matrix"), optional:true, emit: diff_discd_matrix - tuple val(meta), path("*.diff.switch"), optional:true, emit: diff_switch_error + tuple val(meta), path("*.diff.switch") , optional:true, emit: diff_switch_error + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = options.args.tokenize() def bed_arg = (options.args.contains('--bed')) ? "--bed ${bed}" : (options.args.contains('--exclude-bed')) ? "--exclude-bed ${bed}" : @@ -121,7 +120,7 @@ process VCFTOOLS { --out $prefix \\ ${args.join(' ')} \\ $bed_arg \\ - $diff_variant_arg \\ + $diff_variant_arg cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: From 49da8642876ae4d91128168cd0db4f1c858d7792 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Sun, 3 Oct 2021 08:20:26 +0100 Subject: [PATCH 114/314] Update versions key in meta.yml for all modules (#787) --- modules/abacas/meta.yml | 4 ++-- modules/adapterremoval/meta.yml | 4 ++-- modules/agrvate/meta.yml | 4 ++-- modules/allelecounter/meta.yml | 4 ++-- modules/amps/meta.yml | 4 ++-- modules/arriba/meta.yml | 4 ++-- modules/artic/guppyplex/meta.yml | 4 ++-- modules/artic/minion/meta.yml | 4 ++-- modules/bamaligncleaner/meta.yml | 4 ++-- modules/bandage/image/meta.yml | 4 ++-- modules/bbmap/align/meta.yml | 4 ++-- modules/bbmap/bbduk/meta.yml | 4 ++-- modules/bbmap/bbsplit/meta.yml | 4 ++-- modules/bbmap/index/meta.yml | 4 ++-- modules/bcftools/concat/meta.yml | 4 ++-- modules/bcftools/consensus/meta.yml | 4 ++-- modules/bcftools/filter/meta.yml | 4 ++-- modules/bcftools/isec/meta.yml | 4 ++-- modules/bcftools/merge/meta.yml | 4 ++-- modules/bcftools/mpileup/meta.yml | 4 ++-- modules/bcftools/norm/meta.yml | 4 ++-- modules/bcftools/query/meta.yml | 4 ++-- modules/bcftools/reheader/meta.yml | 4 ++-- modules/bcftools/stats/meta.yml | 4 ++-- modules/bcftools/view/meta.yml | 4 ++-- modules/bedtools/bamtobed/meta.yml | 4 ++-- modules/bedtools/complement/meta.yml | 4 ++-- modules/bedtools/genomecov/meta.yml | 4 ++-- modules/bedtools/getfasta/meta.yml | 4 ++-- modules/bedtools/intersect/meta.yml | 4 ++-- modules/bedtools/makewindows/meta.yml | 4 ++-- modules/bedtools/maskfasta/meta.yml | 4 ++-- modules/bedtools/merge/meta.yml | 4 ++-- modules/bedtools/slop/meta.yml | 4 ++-- modules/bedtools/sort/meta.yml | 4 ++-- modules/bedtools/subtract/meta.yml | 4 ++-- modules/bismark/align/meta.yml | 4 ++-- modules/bismark/deduplicate/meta.yml | 4 ++-- modules/bismark/genomepreparation/meta.yml | 4 ++-- modules/bismark/methylationextractor/meta.yml | 4 ++-- modules/bismark/report/meta.yml | 4 ++-- modules/bismark/summary/meta.yml | 4 ++-- modules/blast/blastn/meta.yml | 4 ++-- modules/blast/makeblastdb/meta.yml | 4 ++-- modules/bowtie/align/meta.yml | 4 ++-- modules/bowtie/build/meta.yml | 4 ++-- modules/bowtie2/align/meta.yml | 4 ++-- modules/bowtie2/build/meta.yml | 4 ++-- modules/bwa/aln/meta.yml | 4 ++-- modules/bwa/index/meta.yml | 4 ++-- modules/bwa/mem/meta.yml | 4 ++-- modules/bwa/sampe/meta.yml | 4 ++-- modules/bwa/samse/meta.yml | 4 ++-- modules/bwamem2/index/meta.yml | 4 ++-- modules/bwamem2/mem/meta.yml | 4 ++-- modules/bwameth/align/meta.yml | 4 ++-- modules/bwameth/index/meta.yml | 4 ++-- modules/cat/cat/meta.yml | 4 ++-- modules/cat/fastq/meta.yml | 5 +++++ modules/chromap/chromap/meta.yml | 4 ++-- modules/chromap/index/meta.yml | 4 ++-- modules/cnvkit/meta.yml | 4 ++-- modules/cooler/digest/meta.yml | 4 ++-- modules/cooler/dump/meta.yml | 4 ++-- modules/custom/dumpsoftwareversions/meta.yml | 4 ++-- modules/cutadapt/meta.yml | 4 ++-- modules/damageprofiler/meta.yml | 4 ++-- modules/deeptools/computematrix/meta.yml | 4 ++-- modules/deeptools/plotfingerprint/meta.yml | 4 
++-- modules/deeptools/plotheatmap/meta.yml | 4 ++-- modules/deeptools/plotprofile/meta.yml | 4 ++-- modules/delly/call/meta.yml | 4 ++-- modules/diamond/blastp/meta.yml | 4 ++-- modules/diamond/blastx/meta.yml | 4 ++-- modules/diamond/makedb/meta.yml | 4 ++-- modules/dragonflye/meta.yml | 4 ++-- modules/dshbio/exportsegments/meta.yml | 4 ++-- modules/dshbio/filterbed/meta.yml | 4 ++-- modules/dshbio/filtergff3/meta.yml | 4 ++-- modules/dshbio/splitbed/meta.yml | 4 ++-- modules/dshbio/splitgff3/meta.yml | 4 ++-- modules/ensemblvep/meta.yml | 4 ++-- modules/expansionhunter/meta.yml | 4 ++-- modules/fastani/meta.yml | 4 ++-- modules/fastp/meta.yml | 4 ++-- modules/fastqc/meta.yml | 4 ++-- modules/fasttree/meta.yml | 4 ++-- modules/fgbio/callmolecularconsensusreads/meta.yml | 4 ++-- modules/fgbio/sortbam/meta.yml | 4 ++-- modules/flash/meta.yml | 4 ++-- modules/gatk4/applybqsr/meta.yml | 4 ++-- modules/gatk4/baserecalibrator/meta.yml | 4 ++-- modules/gatk4/bedtointervallist/meta.yml | 4 ++-- modules/gatk4/createsequencedictionary/meta.yml | 4 ++-- modules/gatk4/fastqtosam/meta.yml | 4 ++-- modules/gatk4/getpileupsummaries/meta.yml | 4 ++-- modules/gatk4/haplotypecaller/meta.yml | 4 ++-- modules/gatk4/intervallisttools/meta.yml | 4 ++-- modules/gatk4/markduplicates/meta.yml | 4 ++-- modules/gatk4/mergebamalignment/meta.yml | 4 ++-- modules/gatk4/mergevcfs/meta.yml | 4 ++-- modules/gatk4/mutect2/meta.yml | 4 ++-- modules/gatk4/revertsam/meta.yml | 4 ++-- modules/gatk4/samtofastq/meta.yml | 4 ++-- modules/gatk4/splitncigarreads/meta.yml | 4 ++-- modules/gatk4/variantfiltration/meta.yml | 4 ++-- modules/genmap/index/meta.yml | 4 ++-- modules/genmap/mappability/meta.yml | 4 ++-- modules/gffread/meta.yml | 4 ++-- modules/glnexus/meta.yml | 4 ++-- modules/graphmap2/align/meta.yml | 4 ++-- modules/graphmap2/index/meta.yml | 4 ++-- modules/gubbins/meta.yml | 4 ++-- modules/gunzip/meta.yml | 4 ++-- modules/hifiasm/meta.yml | 4 ++-- modules/hisat2/align/meta.yml | 4 ++-- modules/hisat2/build/meta.yml | 4 ++-- modules/hisat2/extractsplicesites/meta.yml | 4 ++-- modules/hmmer/hmmalign/meta.yml | 4 ++-- modules/homer/annotatepeaks/meta.yml | 4 ++-- modules/homer/findpeaks/meta.yml | 4 ++-- modules/homer/maketagdirectory/meta.yml | 4 ++-- modules/homer/makeucscfile/meta.yml | 4 ++-- modules/iqtree/meta.yml | 4 ++-- modules/ivar/consensus/meta.yml | 4 ++-- modules/ivar/trim/meta.yml | 4 ++-- modules/ivar/variants/meta.yml | 4 ++-- modules/kallisto/index/meta.yml | 4 ++-- modules/kallistobustools/count/meta.yml | 4 ++-- modules/kallistobustools/ref/meta.yml | 4 ++-- modules/kleborate/meta.yml | 4 ++-- modules/kraken2/kraken2/meta.yml | 4 ++-- modules/last/dotplot/meta.yml | 4 ++-- modules/last/lastal/meta.yml | 4 ++-- modules/last/lastdb/meta.yml | 4 ++-- modules/last/mafconvert/meta.yml | 4 ++-- modules/last/mafswap/meta.yml | 4 ++-- modules/last/postmask/meta.yml | 4 ++-- modules/last/split/meta.yml | 4 ++-- modules/last/train/meta.yml | 4 ++-- modules/lima/meta.yml | 6 +++--- modules/lofreq/call/meta.yml | 4 ++-- modules/lofreq/callparallel/meta.yml | 4 ++-- modules/lofreq/filter/meta.yml | 4 ++-- modules/lofreq/indelqual/meta.yml | 4 ++-- modules/malt/build/meta.yml | 4 ++-- modules/malt/run/meta.yml | 4 ++-- modules/maltextract/meta.yml | 4 ++-- modules/mash/sketch/main.nf | 1 + modules/mash/sketch/meta.yml | 6 +++++- modules/metaphlan3/meta.yml | 4 ++-- modules/methyldackel/extract/meta.yml | 4 ++-- modules/methyldackel/mbias/meta.yml | 4 ++-- modules/minia/meta.yml | 4 ++-- 
modules/minimap2/align/meta.yml | 4 ++-- modules/minimap2/index/meta.yml | 4 ++-- modules/mosdepth/meta.yml | 4 ++-- modules/msisensor/msi/meta.yml | 4 ++-- modules/msisensor/scan/meta.yml | 4 ++-- modules/multiqc/meta.yml | 4 ++-- modules/muscle/meta.yml | 4 ++-- modules/nanolyse/meta.yml | 4 ++-- modules/nanoplot/meta.yml | 4 ++-- modules/nextclade/meta.yml | 4 ++-- modules/optitype/meta.yml | 4 ++-- modules/pairix/meta.yml | 4 ++-- modules/pairtools/dedup/meta.yml | 4 ++-- modules/pairtools/flip/meta.yml | 4 ++-- modules/pairtools/parse/meta.yml | 4 ++-- modules/pairtools/restrict/meta.yml | 4 ++-- modules/pairtools/select/meta.yml | 4 ++-- modules/pairtools/sort/meta.yml | 4 ++-- modules/pangolin/meta.yml | 4 ++-- modules/pbccs/meta.yml | 4 ++-- modules/picard/collectmultiplemetrics/meta.yml | 4 ++-- modules/picard/collectwgsmetrics/meta.yml | 4 ++-- modules/picard/filtersamreads/meta.yml | 4 ++-- modules/picard/markduplicates/meta.yml | 4 ++-- modules/picard/mergesamfiles/meta.yml | 4 ++-- modules/picard/sortsam/meta.yml | 4 ++-- modules/plasmidid/meta.yml | 4 ++-- modules/plink/vcf/meta.yml | 4 ++-- modules/preseq/lcextrap/meta.yml | 4 ++-- modules/prodigal/meta.yml | 4 ++-- modules/prokka/meta.yml | 4 ++-- modules/pycoqc/meta.yml | 4 ++-- modules/pydamage/analyze/meta.yml | 4 ++-- modules/pydamage/filter/meta.yml | 4 ++-- modules/qcat/meta.yml | 4 ++-- modules/qualimap/bamqc/meta.yml | 4 ++-- modules/quast/meta.yml | 4 ++-- modules/rapidnj/meta.yml | 4 ++-- modules/rasusa/meta.yml | 4 ++-- modules/raxmlng/meta.yml | 4 ++-- modules/rsem/calculateexpression/meta.yml | 4 ++-- modules/rsem/preparereference/meta.yml | 4 ++-- modules/rseqc/bamstat/meta.yml | 4 ++-- modules/rseqc/inferexperiment/meta.yml | 4 ++-- modules/rseqc/innerdistance/meta.yml | 4 ++-- modules/rseqc/junctionannotation/meta.yml | 4 ++-- modules/rseqc/junctionsaturation/meta.yml | 4 ++-- modules/rseqc/readdistribution/meta.yml | 4 ++-- modules/rseqc/readduplication/meta.yml | 4 ++-- modules/salmon/index/meta.yml | 4 ++-- modules/salmon/quant/meta.yml | 4 ++-- modules/samtools/ampliconclip/meta.yml | 4 ++-- modules/samtools/faidx/meta.yml | 4 ++-- modules/samtools/fastq/meta.yml | 4 ++-- modules/samtools/flagstat/meta.yml | 4 ++-- modules/samtools/idxstats/meta.yml | 4 ++-- modules/samtools/index/meta.yml | 4 ++-- modules/samtools/merge/meta.yml | 4 ++-- modules/samtools/mpileup/meta.yml | 4 ++-- modules/samtools/sort/meta.yml | 4 ++-- modules/samtools/stats/meta.yml | 4 ++-- modules/samtools/view/meta.yml | 4 ++-- modules/seacr/callpeak/meta.yml | 4 ++-- modules/seqkit/split2/meta.yml | 4 ++-- modules/seqtk/sample/meta.yml | 4 ++-- modules/seqtk/subseq/meta.yml | 4 ++-- modules/sequenzautils/bam2seqz/meta.yml | 4 ++-- modules/sequenzautils/gcwiggle/meta.yml | 4 ++-- modules/seqwish/induce/meta.yml | 4 ++-- modules/shovill/meta.yml | 4 ++-- modules/snpdists/meta.yml | 4 ++-- modules/snpeff/meta.yml | 4 ++-- modules/snpsites/meta.yml | 4 ++-- modules/spades/meta.yml | 4 ++-- modules/staphopiasccmec/meta.yml | 4 ++-- modules/star/align/meta.yml | 4 ++-- modules/star/genomegenerate/meta.yml | 4 ++-- modules/strelka/germline/meta.yml | 4 ++-- modules/stringtie/merge/meta.yml | 5 +++++ modules/stringtie/stringtie/meta.yml | 4 ++-- modules/subread/featurecounts/meta.yml | 4 ++-- modules/tabix/bgzip/meta.yml | 4 ++-- modules/tabix/bgziptabix/meta.yml | 4 ++-- modules/tabix/tabix/meta.yml | 4 ++-- modules/tiddit/sv/meta.yml | 4 ++-- modules/trimgalore/meta.yml | 4 ++-- modules/ucsc/bed12tobigbed/meta.yml | 4 ++-- 
modules/ucsc/bedclip/meta.yml | 4 ++-- modules/ucsc/bedgraphtobigwig/meta.yml | 4 ++-- modules/ucsc/bigwigaverageoverbed/meta.yml | 4 ++-- modules/ucsc/wigtobigwig/meta.yml | 4 ++-- modules/unicycler/meta.yml | 8 ++++---- modules/untar/meta.yml | 4 ++-- modules/unzip/meta.yml | 8 ++++---- modules/variantbam/meta.yml | 4 ++-- modules/vcftools/meta.yml | 4 ++-- modules/yara/index/meta.yml | 4 ++-- modules/yara/mapper/meta.yml | 4 ++-- 252 files changed, 517 insertions(+), 502 deletions(-) diff --git a/modules/abacas/meta.yml b/modules/abacas/meta.yml index d8c45628..039fb0be 100644 --- a/modules/abacas/meta.yml +++ b/modules/abacas/meta.yml @@ -48,9 +48,9 @@ output: 'test.abacas.MULTIFASTA.fa' ] pattern: "*.{abacas}*" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/adapterremoval/meta.yml b/modules/adapterremoval/meta.yml index 6282436a..05386fd8 100644 --- a/modules/adapterremoval/meta.yml +++ b/modules/adapterremoval/meta.yml @@ -41,9 +41,9 @@ output: type: file description: AdapterRemoval log file pattern: "*.log" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/agrvate/meta.yml b/modules/agrvate/meta.yml index bd27050a..a8ab5816 100644 --- a/modules/agrvate/meta.yml +++ b/modules/agrvate/meta.yml @@ -38,9 +38,9 @@ output: type: directory description: Results of the agrvate assessement pattern: "*-results" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/allelecounter/meta.yml b/modules/allelecounter/meta.yml index 67b398f3..a15f3eac 100644 --- a/modules/allelecounter/meta.yml +++ b/modules/allelecounter/meta.yml @@ -39,9 +39,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - alleleCount: type: file diff --git a/modules/amps/meta.yml b/modules/amps/meta.yml index 62844f6a..43d68599 100644 --- a/modules/amps/meta.yml +++ b/modules/amps/meta.yml @@ -41,9 +41,9 @@ input: pattern: "def_anc|default|scan|ancient|crawl" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - json: type: file diff --git a/modules/arriba/meta.yml b/modules/arriba/meta.yml index ddd2c75b..6ca16dab 100644 --- a/modules/arriba/meta.yml +++ b/modules/arriba/meta.yml @@ -37,9 +37,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - fusions: type: file diff --git a/modules/artic/guppyplex/meta.yml b/modules/artic/guppyplex/meta.yml index 45ec7138..5056f908 100644 --- a/modules/artic/guppyplex/meta.yml +++ b/modules/artic/guppyplex/meta.yml @@ -34,9 +34,9 @@ output: type: file description: Aggregated FastQ files pattern: "*.{fastq.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/artic/minion/meta.yml b/modules/artic/minion/meta.yml index 77f325e5..464e1dc7 100644 --- a/modules/artic/minion/meta.yml +++ b/modules/artic/minion/meta.yml @@ -103,9 +103,9 @@ output: type: file description: JSON file for MultiQC pattern: "*.json" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/bamaligncleaner/meta.yml b/modules/bamaligncleaner/meta.yml index c236c0ea..d1e171f7 100644 --- a/modules/bamaligncleaner/meta.yml +++ b/modules/bamaligncleaner/meta.yml @@ -27,9 +27,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/bandage/image/meta.yml b/modules/bandage/image/meta.yml index f655cae4..65f47664 100644 --- a/modules/bandage/image/meta.yml +++ b/modules/bandage/image/meta.yml @@ -35,9 +35,9 @@ output: type: file description: Bandage image in SVG format pattern: "*.svg" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/bbmap/align/meta.yml b/modules/bbmap/align/meta.yml index bb52f06e..fe4d4334 100644 --- a/modules/bbmap/align/meta.yml +++ b/modules/bbmap/align/meta.yml @@ -39,9 +39,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/bbmap/bbduk/meta.yml b/modules/bbmap/bbduk/meta.yml index a1ab789c..50ab6ed4 100644 --- a/modules/bbmap/bbduk/meta.yml +++ b/modules/bbmap/bbduk/meta.yml @@ -39,9 +39,9 @@ output: type: file description: The trimmed/modified fastq reads pattern: "*fastq.gz" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - log: type: file diff --git a/modules/bbmap/bbsplit/meta.yml b/modules/bbmap/bbsplit/meta.yml index 2eb3a6c9..2e3d07c0 100644 --- a/modules/bbmap/bbsplit/meta.yml +++ b/modules/bbmap/bbsplit/meta.yml @@ -49,9 +49,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - index: type: directory diff --git a/modules/bbmap/index/meta.yml b/modules/bbmap/index/meta.yml index 1df990b2..0b3e5778 100644 --- a/modules/bbmap/index/meta.yml +++ b/modules/bbmap/index/meta.yml @@ -20,9 +20,9 @@ input: pattern: "*.{fna,fa,fasta}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - db: type: directory diff --git a/modules/bcftools/concat/meta.yml b/modules/bcftools/concat/meta.yml index 81701288..e394d18d 100644 --- a/modules/bcftools/concat/meta.yml +++ b/modules/bcftools/concat/meta.yml @@ -34,9 +34,9 @@ output: type: file description: VCF concatenated output file pattern: "*.{vcf.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/bcftools/consensus/meta.yml b/modules/bcftools/consensus/meta.yml index 4241e441..30f4910a 100644 --- a/modules/bcftools/consensus/meta.yml +++ b/modules/bcftools/consensus/meta.yml @@ -39,9 +39,9 @@ output: type: file description: FASTA reference consensus file pattern: "*.{fasta,fa}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bcftools/filter/meta.yml b/modules/bcftools/filter/meta.yml index 6842b1f8..433b203d 100644 --- a/modules/bcftools/filter/meta.yml +++ b/modules/bcftools/filter/meta.yml @@ -31,9 +31,9 @@ output: type: file description: VCF filtered output file pattern: "*.{vcf}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bcftools/isec/meta.yml b/modules/bcftools/isec/meta.yml index 7a75a3af..6a482257 100644 --- a/modules/bcftools/isec/meta.yml +++ b/modules/bcftools/isec/meta.yml @@ -39,9 +39,9 @@ output: type: directory description: Folder containing the set operations results perform on the vcf files pattern: "${prefix}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bcftools/merge/meta.yml b/modules/bcftools/merge/meta.yml index 262d883a..056ea37d 100644 --- a/modules/bcftools/merge/meta.yml +++ b/modules/bcftools/merge/meta.yml @@ -37,9 +37,9 @@ output: type: file description: VCF merged output file pattern: "*.{vcf.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bcftools/mpileup/meta.yml b/modules/bcftools/mpileup/meta.yml index 44f2b81e..49f02a40 100644 --- a/modules/bcftools/mpileup/meta.yml +++ b/modules/bcftools/mpileup/meta.yml @@ -43,9 +43,9 @@ output: type: file description: Text output file containing stats pattern: "*{stats.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git 
a/modules/bcftools/norm/meta.yml b/modules/bcftools/norm/meta.yml index f2534452..760186dc 100644 --- a/modules/bcftools/norm/meta.yml +++ b/modules/bcftools/norm/meta.yml @@ -37,9 +37,9 @@ output: type: file description: VCF normalized output file pattern: "*.{vcf.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/bcftools/query/meta.yml b/modules/bcftools/query/meta.yml index 57570c64..12b11216 100644 --- a/modules/bcftools/query/meta.yml +++ b/modules/bcftools/query/meta.yml @@ -53,9 +53,9 @@ output: type: file description: VCF query output file pattern: "*.{vcf.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/bcftools/reheader/meta.yml b/modules/bcftools/reheader/meta.yml index 823e3279..6d7c9f97 100644 --- a/modules/bcftools/reheader/meta.yml +++ b/modules/bcftools/reheader/meta.yml @@ -38,9 +38,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - vcf: type: file diff --git a/modules/bcftools/stats/meta.yml b/modules/bcftools/stats/meta.yml index 33675cb9..78294ff7 100644 --- a/modules/bcftools/stats/meta.yml +++ b/modules/bcftools/stats/meta.yml @@ -32,9 +32,9 @@ output: type: file description: Text output file containing stats pattern: "*_{stats.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bcftools/view/meta.yml b/modules/bcftools/view/meta.yml index e37e41b5..638a4e4f 100644 --- a/modules/bcftools/view/meta.yml +++ b/modules/bcftools/view/meta.yml @@ -54,9 +54,9 @@ output: type: file description: VCF normalized output file pattern: "*.{vcf.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/bedtools/bamtobed/meta.yml b/modules/bedtools/bamtobed/meta.yml index 5d7889ea..0eaf3e2a 100644 --- a/modules/bedtools/bamtobed/meta.yml +++ b/modules/bedtools/bamtobed/meta.yml @@ -28,9 +28,9 @@ output: type: file description: Bed file containing genomic intervals. pattern: "*.{bed}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@yuukiiwa" diff --git a/modules/bedtools/complement/meta.yml b/modules/bedtools/complement/meta.yml index 183c9e8f..02ddca29 100644 --- a/modules/bedtools/complement/meta.yml +++ b/modules/bedtools/complement/meta.yml @@ -32,9 +32,9 @@ output: type: file description: Bed file with all genomic intervals that are not covered by at least one record from the input file. 
pattern: "*.{bed}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@Emiller88" diff --git a/modules/bedtools/genomecov/meta.yml b/modules/bedtools/genomecov/meta.yml index 7f28c185..bc49ab03 100644 --- a/modules/bedtools/genomecov/meta.yml +++ b/modules/bedtools/genomecov/meta.yml @@ -35,9 +35,9 @@ output: type: file description: Computed genome coverage file pattern: "*.${extension}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@Emiller88" diff --git a/modules/bedtools/getfasta/meta.yml b/modules/bedtools/getfasta/meta.yml index 1ddd4bbb..89fbea54 100644 --- a/modules/bedtools/getfasta/meta.yml +++ b/modules/bedtools/getfasta/meta.yml @@ -24,9 +24,9 @@ output: type: file description: Output fasta file with extracted sequences pattern: "*.{fa}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bedtools/intersect/meta.yml b/modules/bedtools/intersect/meta.yml index 2c229884..a14bf515 100644 --- a/modules/bedtools/intersect/meta.yml +++ b/modules/bedtools/intersect/meta.yml @@ -35,9 +35,9 @@ output: type: file description: File containing the description of overlaps found between the two features pattern: "*.${extension}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@Emiller88" diff --git a/modules/bedtools/makewindows/meta.yml b/modules/bedtools/makewindows/meta.yml index dcddbc75..7d86e127 100644 --- a/modules/bedtools/makewindows/meta.yml +++ b/modules/bedtools/makewindows/meta.yml @@ -31,9 +31,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - tab: type: file diff --git a/modules/bedtools/maskfasta/meta.yml b/modules/bedtools/maskfasta/meta.yml index 0474118b..428d6f57 100644 --- a/modules/bedtools/maskfasta/meta.yml +++ b/modules/bedtools/maskfasta/meta.yml @@ -34,9 +34,9 @@ output: type: file description: Output masked fasta file pattern: "*.{fa}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bedtools/merge/meta.yml b/modules/bedtools/merge/meta.yml index 0618c0ff..39e79cbd 100644 --- a/modules/bedtools/merge/meta.yml +++ b/modules/bedtools/merge/meta.yml @@ -28,9 +28,9 @@ output: type: file description: Overlapped bed file with combined features pattern: "*.{bed}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@Emiller88" diff --git a/modules/bedtools/slop/meta.yml b/modules/bedtools/slop/meta.yml index bdcdc1d2..709d88c3 100644 --- a/modules/bedtools/slop/meta.yml +++ b/modules/bedtools/slop/meta.yml @@ -28,9 +28,9 @@ output: type: file description: Slopped BED file pattern: "*.{bed}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@Emiller88" diff --git a/modules/bedtools/sort/meta.yml b/modules/bedtools/sort/meta.yml index d09886a5..a0332787 100644 --- a/modules/bedtools/sort/meta.yml +++ b/modules/bedtools/sort/meta.yml @@ -28,9 +28,9 @@ output: type: file description: Sorted BED file pattern: "*.{bed}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@Emiller88" diff --git a/modules/bedtools/subtract/meta.yml b/modules/bedtools/subtract/meta.yml index 8c99b80a..e13057bb 100644 --- a/modules/bedtools/subtract/meta.yml +++ b/modules/bedtools/subtract/meta.yml @@ -36,9 +36,9 @@ output: type: file description: File containing the difference between the two sets of features patters: "*.bed" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/bismark/align/meta.yml b/modules/bismark/align/meta.yml index d9bacf04..92a3b1ec 100644 --- a/modules/bismark/align/meta.yml +++ b/modules/bismark/align/meta.yml @@ -50,9 +50,9 @@ output: type: file description: Bismark alignment reports pattern: "*{report.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/deduplicate/meta.yml b/modules/bismark/deduplicate/meta.yml index 11d8797b..d19a915f 100644 --- a/modules/bismark/deduplicate/meta.yml +++ b/modules/bismark/deduplicate/meta.yml @@ -43,9 +43,9 @@ output: type: file description: Bismark deduplication reports pattern: "*.{deduplication_report.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git 
a/modules/bismark/genomepreparation/meta.yml b/modules/bismark/genomepreparation/meta.yml index 6d267343..7712d7c2 100644 --- a/modules/bismark/genomepreparation/meta.yml +++ b/modules/bismark/genomepreparation/meta.yml @@ -28,9 +28,9 @@ output: type: dir description: Bismark genome index directory pattern: "BismarkIndex" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/methylationextractor/meta.yml b/modules/bismark/methylationextractor/meta.yml index 2ae7cf64..9fa0f4f4 100644 --- a/modules/bismark/methylationextractor/meta.yml +++ b/modules/bismark/methylationextractor/meta.yml @@ -58,9 +58,9 @@ output: type: file description: Text file containing methylation bias information pattern: "*.{M-bias.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/report/meta.yml b/modules/bismark/report/meta.yml index 57b8c746..889d1227 100644 --- a/modules/bismark/report/meta.yml +++ b/modules/bismark/report/meta.yml @@ -51,9 +51,9 @@ output: type: file description: Bismark reports pattern: "*.{html,txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bismark/summary/meta.yml b/modules/bismark/summary/meta.yml index 37d8951b..10f71fe4 100644 --- a/modules/bismark/summary/meta.yml +++ b/modules/bismark/summary/meta.yml @@ -45,9 +45,9 @@ output: type: file description: Bismark summary pattern: "*.{html,txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/blast/blastn/meta.yml b/modules/blast/blastn/meta.yml index b4a832ea..d19d3df6 100644 --- a/modules/blast/blastn/meta.yml +++ b/modules/blast/blastn/meta.yml @@ -31,9 +31,9 @@ output: type: file description: File containing blastn hits pattern: "*.{blastn.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/blast/makeblastdb/meta.yml b/modules/blast/makeblastdb/meta.yml index 9a5957db..545cc2a0 100644 --- a/modules/blast/makeblastdb/meta.yml +++ b/modules/blast/makeblastdb/meta.yml @@ -21,9 +21,9 @@ output: type: directory description: Output directory containing blast database files pattern: "*" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bowtie/align/meta.yml b/modules/bowtie/align/meta.yml index e5ada585..73c65631 100644 --- a/modules/bowtie/align/meta.yml +++ b/modules/bowtie/align/meta.yml @@ -33,9 +33,9 @@ output: type: file description: Output BAM file containing read alignments pattern: "*.{bam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - fastq: type: file diff --git a/modules/bowtie/build/meta.yml b/modules/bowtie/build/meta.yml index e97068f6..aa39f32e 100644 --- a/modules/bowtie/build/meta.yml +++ 
b/modules/bowtie/build/meta.yml @@ -22,9 +22,9 @@ output: type: file description: Bowtie genome index files pattern: "*.ebwt" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/bowtie2/align/meta.yml b/modules/bowtie2/align/meta.yml index cba6eacf..f9d54d87 100644 --- a/modules/bowtie2/align/meta.yml +++ b/modules/bowtie2/align/meta.yml @@ -33,9 +33,9 @@ output: type: file description: Output BAM file containing read alignments pattern: "*.{bam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - fastq: type: file diff --git a/modules/bowtie2/build/meta.yml b/modules/bowtie2/build/meta.yml index 70045f3c..4531d079 100644 --- a/modules/bowtie2/build/meta.yml +++ b/modules/bowtie2/build/meta.yml @@ -23,9 +23,9 @@ output: type: file description: Bowtie2 genome index files pattern: "*.bt2" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/bwa/aln/meta.yml b/modules/bwa/aln/meta.yml index b3797eac..d4a2b19d 100644 --- a/modules/bwa/aln/meta.yml +++ b/modules/bwa/aln/meta.yml @@ -41,9 +41,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - sai: type: file diff --git a/modules/bwa/index/meta.yml b/modules/bwa/index/meta.yml index 43ffd73d..c3c0a8d8 100644 --- a/modules/bwa/index/meta.yml +++ b/modules/bwa/index/meta.yml @@ -22,9 +22,9 @@ output: type: file description: BWA genome index files pattern: "*.{amb,ann,bwt,pac,sa}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/bwa/mem/meta.yml b/modules/bwa/mem/meta.yml index 618f20d5..66238507 100644 --- a/modules/bwa/mem/meta.yml +++ b/modules/bwa/mem/meta.yml @@ -36,9 +36,9 @@ output: type: file description: Output BAM file containing read alignments pattern: "*.{bam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/bwa/sampe/meta.yml b/modules/bwa/sampe/meta.yml index aeb592f7..ec2dfff5 100644 --- a/modules/bwa/sampe/meta.yml +++ b/modules/bwa/sampe/meta.yml @@ -45,9 +45,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/bwa/samse/meta.yml b/modules/bwa/samse/meta.yml index 3c44741d..1e7ef335 100644 --- a/modules/bwa/samse/meta.yml +++ b/modules/bwa/samse/meta.yml @@ -46,9 +46,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/bwamem2/index/meta.yml b/modules/bwamem2/index/meta.yml index ee84ccfc..1b36be8d 100644 --- a/modules/bwamem2/index/meta.yml +++ b/modules/bwamem2/index/meta.yml @@ -21,9 +21,9 @@ output: type: file description: BWA genome index files pattern: "*.{0132,amb,ann,bwt.2bit.64,pac}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/bwamem2/mem/meta.yml b/modules/bwamem2/mem/meta.yml index 434fc7ca..2fb4449e 100644 --- a/modules/bwamem2/mem/meta.yml +++ b/modules/bwamem2/mem/meta.yml @@ -36,9 +36,9 @@ output: type: file description: Output BAM file containing read alignments pattern: "*.{bam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/bwameth/align/meta.yml b/modules/bwameth/align/meta.yml index 03bd66f7..11fc9949 100644 --- a/modules/bwameth/align/meta.yml +++ b/modules/bwameth/align/meta.yml @@ -43,9 +43,9 @@ output: type: file description: Output BAM file containing read alignments pattern: "*.{bam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/bwameth/index/meta.yml b/modules/bwameth/index/meta.yml index b07dbde5..c96fbfbb 100644 --- a/modules/bwameth/index/meta.yml +++ b/modules/bwameth/index/meta.yml @@ -24,9 +24,9 @@ output: type: dir description: Directory containing bwameth genome index pattern: "index" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/cat/cat/meta.yml b/modules/cat/cat/meta.yml index d283107e..f1a46ca3 100644 --- a/modules/cat/cat/meta.yml +++ b/modules/cat/cat/meta.yml @@ -21,9 +21,9 @@ input: description: Full name of output file with or without .gz extension output: - - version: + - versions: type: file - description: File containing version of the pigz software + description: File containing software versions pattern: "versions.yml" - file_out: type: file diff --git a/modules/cat/fastq/meta.yml b/modules/cat/fastq/meta.yml index e7b8eebe..6c6c397e 100644 --- a/modules/cat/fastq/meta.yml +++ b/modules/cat/fastq/meta.yml @@ -28,6 +28,11 @@ output: type: file description: Merged fastq file pattern: "*.{merged.fastq.gz}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + authors: - "@joseespinosa" - "@drpatelh" diff --git a/modules/chromap/chromap/meta.yml b/modules/chromap/chromap/meta.yml index d52e4202..57936c67 100644 --- a/modules/chromap/chromap/meta.yml +++ b/modules/chromap/chromap/meta.yml @@ -63,9 +63,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bed: type: file diff --git a/modules/chromap/index/meta.yml b/modules/chromap/index/meta.yml index 0b3aba75..a6a18fe9 100644 --- a/modules/chromap/index/meta.yml +++ b/modules/chromap/index/meta.yml @@ -20,9 +20,9 @@ input: description: Fasta reference file. output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - index: type: file diff --git a/modules/cnvkit/meta.yml b/modules/cnvkit/meta.yml index 5094308f..30c1b588 100755 --- a/modules/cnvkit/meta.yml +++ b/modules/cnvkit/meta.yml @@ -75,9 +75,9 @@ output: type: file description: File containing copy number segment information pattern: "*.{cns}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kaurravneet4123" diff --git a/modules/cooler/digest/meta.yml b/modules/cooler/digest/meta.yml index f46fbaff..4fb85e4f 100644 --- a/modules/cooler/digest/meta.yml +++ b/modules/cooler/digest/meta.yml @@ -26,9 +26,9 @@ input: documentation: http://biopython.org/DIST/docs/cookbook/Restriction.html output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bed: type: file diff --git a/modules/cooler/dump/meta.yml b/modules/cooler/dump/meta.yml index ab2d0356..1d98a62e 100644 --- a/modules/cooler/dump/meta.yml +++ b/modules/cooler/dump/meta.yml @@ -28,9 +28,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bedpe: type: file diff --git a/modules/custom/dumpsoftwareversions/meta.yml b/modules/custom/dumpsoftwareversions/meta.yml index 1cf61615..8d4a6ed4 100644 --- a/modules/custom/dumpsoftwareversions/meta.yml +++ b/modules/custom/dumpsoftwareversions/meta.yml @@ -24,9 +24,9 @@ output: type: file description: MultiQC custom content YML file containing software versions pattern: "software_versions_mqc.yml" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/cutadapt/meta.yml b/modules/cutadapt/meta.yml index 87276306..62c2ccde 100644 --- a/modules/cutadapt/meta.yml +++ b/modules/cutadapt/meta.yml @@ -36,9 +36,9 @@ output: type: file description: cuatadapt log file pattern: "*cutadapt.log" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/damageprofiler/meta.yml b/modules/damageprofiler/meta.yml index 9451f1b2..ff82ba09 100644 --- a/modules/damageprofiler/meta.yml +++ b/modules/damageprofiler/meta.yml @@ -40,9 +40,9 @@ input: pattern: "*.{fai}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - results: type: dir diff --git a/modules/deeptools/computematrix/meta.yml b/modules/deeptools/computematrix/meta.yml index e3b0282d..584fade1 100644 --- a/modules/deeptools/computematrix/meta.yml +++ b/modules/deeptools/computematrix/meta.yml @@ -46,9 +46,9 @@ output: description: | tabular file containing the scores of the generated matrix pattern: "*.{computeMatrix.vals.mat.tab}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/deeptools/plotfingerprint/meta.yml b/modules/deeptools/plotfingerprint/meta.yml index 6ba88882..3acd1471 100644 --- a/modules/deeptools/plotfingerprint/meta.yml +++ b/modules/deeptools/plotfingerprint/meta.yml @@ -50,9 +50,9 @@ output: description: | file containing BAM file quality metrics pattern: "*.{qcmetrics.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/deeptools/plotheatmap/meta.yml b/modules/deeptools/plotheatmap/meta.yml index 97af67f6..34f2865b 100644 --- a/modules/deeptools/plotheatmap/meta.yml +++ b/modules/deeptools/plotheatmap/meta.yml @@ -44,9 +44,9 @@ output: File containing the matrix of values used to generate the heatmap pattern: "*.{plotHeatmap.mat.tab}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/deeptools/plotprofile/meta.yml b/modules/deeptools/plotprofile/meta.yml index 08fafa49..5b61aed4 100644 --- a/modules/deeptools/plotprofile/meta.yml +++ b/modules/deeptools/plotprofile/meta.yml @@ -44,9 +44,9 @@ output: File containing the matrix of values used to generate the profile pattern: "*.{plotProfile.mat.tab}" - - version: + - versions: type: 
file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/delly/call/meta.yml b/modules/delly/call/meta.yml index 16d1a6f2..75e5c9c2 100644 --- a/modules/delly/call/meta.yml +++ b/modules/delly/call/meta.yml @@ -38,9 +38,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bcf: type: file diff --git a/modules/diamond/blastp/meta.yml b/modules/diamond/blastp/meta.yml index e92b1594..228c1a22 100644 --- a/modules/diamond/blastp/meta.yml +++ b/modules/diamond/blastp/meta.yml @@ -34,9 +34,9 @@ output: type: file description: File containing blastp hits pattern: "*.{blastp.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/diamond/blastx/meta.yml b/modules/diamond/blastx/meta.yml index 6e92a336..4a3ab9b6 100644 --- a/modules/diamond/blastx/meta.yml +++ b/modules/diamond/blastx/meta.yml @@ -34,9 +34,9 @@ output: type: file description: File containing blastx hits pattern: "*.{blastx.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/diamond/makedb/meta.yml b/modules/diamond/makedb/meta.yml index 4d8cb695..e378be7e 100644 --- a/modules/diamond/makedb/meta.yml +++ b/modules/diamond/makedb/meta.yml @@ -25,9 +25,9 @@ output: type: file description: File of the indexed DIAMOND database pattern: "*.{dmnd}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/dragonflye/meta.yml b/modules/dragonflye/meta.yml index 9affa2f3..773795db 100644 --- a/modules/dragonflye/meta.yml +++ b/modules/dragonflye/meta.yml @@ -28,9 +28,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - contigs: type: file diff --git a/modules/dshbio/exportsegments/meta.yml b/modules/dshbio/exportsegments/meta.yml index c57a6179..b9b145df 100644 --- a/modules/dshbio/exportsegments/meta.yml +++ b/modules/dshbio/exportsegments/meta.yml @@ -32,9 +32,9 @@ output: type: file description: Assembly segment sequences in FASTA format pattern: "*.{fa}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/dshbio/filterbed/meta.yml b/modules/dshbio/filterbed/meta.yml index 5545aac1..0e09b392 100644 --- a/modules/dshbio/filterbed/meta.yml +++ b/modules/dshbio/filterbed/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Features in gzipped BED format pattern: "*.{bed.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/dshbio/filtergff3/meta.yml b/modules/dshbio/filtergff3/meta.yml index d1b7a509..2fd916fa 100644 --- a/modules/dshbio/filtergff3/meta.yml +++ b/modules/dshbio/filtergff3/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Features in gzipped GFF3 format pattern: "*.{gff3.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/dshbio/splitbed/meta.yml b/modules/dshbio/splitbed/meta.yml index 0c4788a1..16aec66b 100644 --- a/modules/dshbio/splitbed/meta.yml +++ b/modules/dshbio/splitbed/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Features in split gzipped BED formatted files pattern: "*.{bed.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/dshbio/splitgff3/meta.yml b/modules/dshbio/splitgff3/meta.yml index 1bdfa652..36e37862 100644 --- a/modules/dshbio/splitgff3/meta.yml +++ b/modules/dshbio/splitgff3/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Features in split gzipped GFF3 formatted files pattern: "*.{gff3.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/ensemblvep/meta.yml b/modules/ensemblvep/meta.yml index e97c5609..9ec4f6a4 100644 --- a/modules/ensemblvep/meta.yml +++ b/modules/ensemblvep/meta.yml @@ -56,9 +56,9 @@ output: type: file description: VEP report file pattern: "*.html" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/expansionhunter/meta.yml b/modules/expansionhunter/meta.yml index a5733d93..54bb3293 100644 --- a/modules/expansionhunter/meta.yml +++ b/modules/expansionhunter/meta.yml @@ -37,9 +37,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', gender:'female' ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - vcf: type: file diff --git a/modules/fastani/meta.yml b/modules/fastani/meta.yml index 783ae068..dc62d485 100644 --- a/modules/fastani/meta.yml +++ b/modules/fastani/meta.yml @@ -35,9 +35,9 @@ output: type: file description: Results of the query pattern: "*.ani.txt" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/fastp/meta.yml b/modules/fastp/meta.yml index 72ddb7d7..cfef4a99 100644 --- a/modules/fastp/meta.yml +++ b/modules/fastp/meta.yml @@ -44,9 +44,9 @@ output: type: file description: fastq log file pattern: "*.log" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - reads_fail: type: file diff --git a/modules/fastqc/meta.yml b/modules/fastqc/meta.yml index 48031356..0ae08aee 100644 --- a/modules/fastqc/meta.yml +++ b/modules/fastqc/meta.yml @@ -40,9 +40,9 @@ output: type: file description: FastQC report archive pattern: "*_{fastqc.zip}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/fasttree/meta.yml b/modules/fasttree/meta.yml index 70000030..5906675b 100644 --- a/modules/fasttree/meta.yml +++ b/modules/fasttree/meta.yml @@ -19,9 +19,9 @@ input: pattern: "*.{fasta,fas,fa,mfa}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - phylogeny: type: file diff --git a/modules/fgbio/callmolecularconsensusreads/meta.yml b/modules/fgbio/callmolecularconsensusreads/meta.yml index 3e62c3a6..523f3214 100644 --- a/modules/fgbio/callmolecularconsensusreads/meta.yml +++ b/modules/fgbio/callmolecularconsensusreads/meta.yml @@ -36,9 +36,9 @@ output: description: | Output SAM or BAM file to write consensus reads. pattern: "*.{bam,sam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/fgbio/sortbam/meta.yml b/modules/fgbio/sortbam/meta.yml index def106c3..b8040dab 100644 --- a/modules/fgbio/sortbam/meta.yml +++ b/modules/fgbio/sortbam/meta.yml @@ -34,9 +34,9 @@ output: description: | Output SAM or BAM file. 
pattern: "*.{bam,sam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/flash/meta.yml b/modules/flash/meta.yml index 62d40e20..06807523 100644 --- a/modules/flash/meta.yml +++ b/modules/flash/meta.yml @@ -35,9 +35,9 @@ output: type: file description: The merged fastq reads pattern: "*fastq.gz" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index b0177c76..be815bd8 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -45,9 +45,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/gatk4/baserecalibrator/meta.yml b/modules/gatk4/baserecalibrator/meta.yml index a5bac064..068f8ef1 100644 --- a/modules/gatk4/baserecalibrator/meta.yml +++ b/modules/gatk4/baserecalibrator/meta.yml @@ -45,9 +45,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - table: type: file diff --git a/modules/gatk4/bedtointervallist/meta.yml b/modules/gatk4/bedtointervallist/meta.yml index 28fd5d22..aacca1a6 100644 --- a/modules/gatk4/bedtointervallist/meta.yml +++ b/modules/gatk4/bedtointervallist/meta.yml @@ -31,9 +31,9 @@ output: type: file description: gatk interval list file pattern: "*.interval_list" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/createsequencedictionary/meta.yml b/modules/gatk4/createsequencedictionary/meta.yml index 21bdc599..90f415a2 100644 --- a/modules/gatk4/createsequencedictionary/meta.yml +++ b/modules/gatk4/createsequencedictionary/meta.yml @@ -22,9 +22,9 @@ output: type: file description: gatk dictionary file pattern: "*.{dict}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/gatk4/fastqtosam/meta.yml b/modules/gatk4/fastqtosam/meta.yml index 4ae9eeaa..ab56ec53 100644 --- a/modules/gatk4/fastqtosam/meta.yml +++ b/modules/gatk4/fastqtosam/meta.yml @@ -34,9 +34,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/gatk4/getpileupsummaries/meta.yml b/modules/gatk4/getpileupsummaries/meta.yml index 5bb87e80..bda0ccb1 100644 --- a/modules/gatk4/getpileupsummaries/meta.yml +++ b/modules/gatk4/getpileupsummaries/meta.yml @@ -48,9 +48,9 @@ output: type: file description: File containing the pileup summary table. 
pattern: "*.pileups.table" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/gatk4/haplotypecaller/meta.yml b/modules/gatk4/haplotypecaller/meta.yml index 4b8e8387..73adc950 100644 --- a/modules/gatk4/haplotypecaller/meta.yml +++ b/modules/gatk4/haplotypecaller/meta.yml @@ -46,9 +46,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - vcf: type: file diff --git a/modules/gatk4/intervallisttools/meta.yml b/modules/gatk4/intervallisttools/meta.yml index 65adb7b6..14f7db35 100644 --- a/modules/gatk4/intervallisttools/meta.yml +++ b/modules/gatk4/intervallisttools/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - interval_list: type: file diff --git a/modules/gatk4/markduplicates/meta.yml b/modules/gatk4/markduplicates/meta.yml index 58e30910..bd5ed5e7 100644 --- a/modules/gatk4/markduplicates/meta.yml +++ b/modules/gatk4/markduplicates/meta.yml @@ -32,9 +32,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/gatk4/mergebamalignment/meta.yml b/modules/gatk4/mergebamalignment/meta.yml index e2e7b7ec..7823c458 100644 --- a/modules/gatk4/mergebamalignment/meta.yml +++ b/modules/gatk4/mergebamalignment/meta.yml @@ -37,9 +37,9 @@ output: type: file description: The merged bam file pattern: "*.bam" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/mergevcfs/meta.yml b/modules/gatk4/mergevcfs/meta.yml index d2679ab8..b20d7bb5 100644 --- a/modules/gatk4/mergevcfs/meta.yml +++ b/modules/gatk4/mergevcfs/meta.yml @@ -34,9 +34,9 @@ output: type: file description: merged vcf file pattern: "*.vcf.gz" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 75b38153..182b6712 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -84,9 +84,9 @@ output: type: file description: file containing information to be passed to LearnReadOrientationModel (only outputted when tumor_normal_pair mode is run) pattern: "*.f1r2.tar.gz" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/gatk4/revertsam/meta.yml b/modules/gatk4/revertsam/meta.yml index d6a1d7fa..619450d3 100644 --- a/modules/gatk4/revertsam/meta.yml +++ b/modules/gatk4/revertsam/meta.yml @@ -27,9 +27,9 @@ output: type: file description: The reverted bam/sam file pattern: "*.reverted.bam" - - version: + 
- versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/samtofastq/meta.yml b/modules/gatk4/samtofastq/meta.yml index 956d2186..20033ec2 100644 --- a/modules/gatk4/samtofastq/meta.yml +++ b/modules/gatk4/samtofastq/meta.yml @@ -27,9 +27,9 @@ output: type: file description: converted fastq file pattern: "*.fastq" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/splitncigarreads/meta.yml b/modules/gatk4/splitncigarreads/meta.yml index c4266874..9eefb545 100644 --- a/modules/gatk4/splitncigarreads/meta.yml +++ b/modules/gatk4/splitncigarreads/meta.yml @@ -32,9 +32,9 @@ output: type: file description: Output file with split reads (BAM/SAM/CRAM) pattern: "*.{bam,sam,cram}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/gatk4/variantfiltration/meta.yml b/modules/gatk4/variantfiltration/meta.yml index 6b0a9026..4dbd71fe 100644 --- a/modules/gatk4/variantfiltration/meta.yml +++ b/modules/gatk4/variantfiltration/meta.yml @@ -39,9 +39,9 @@ output: type: file description: filtered VCF file pattern: "*.filtered.{vcf}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/genmap/index/meta.yml b/modules/genmap/index/meta.yml index cd299da2..adecf3c0 100644 --- a/modules/genmap/index/meta.yml +++ b/modules/genmap/index/meta.yml @@ -18,9 +18,9 @@ input: pattern: "*.{fasta,fa}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - index: type: index diff --git a/modules/genmap/mappability/meta.yml b/modules/genmap/mappability/meta.yml index 90807077..c28cbd6d 100644 --- a/modules/genmap/mappability/meta.yml +++ b/modules/genmap/mappability/meta.yml @@ -21,9 +21,9 @@ input: description: index file output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - wig: type: file diff --git a/modules/gffread/meta.yml b/modules/gffread/meta.yml index 1cb7fc91..bf1a15cb 100644 --- a/modules/gffread/meta.yml +++ b/modules/gffread/meta.yml @@ -24,9 +24,9 @@ output: type: file description: GTF file resulting from the conversion of the GFF input file pattern: "*.{gtf}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/glnexus/meta.yml b/modules/glnexus/meta.yml index fd1a407d..aec25bb0 100644 --- a/modules/glnexus/meta.yml +++ b/modules/glnexus/meta.yml @@ -24,9 +24,9 @@ input: pattern: "*.{gvcf,gvcf.gz,g.vcf,g.vcf.gz}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bcf: type: file diff --git a/modules/graphmap2/align/meta.yml b/modules/graphmap2/align/meta.yml index a5b3cd6c..a4acb648 100644 --- a/modules/graphmap2/align/meta.yml +++ 
b/modules/graphmap2/align/meta.yml @@ -41,9 +41,9 @@ output: type: file description: Alignment in SAM format pattern: "*.sam" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@yuukiiwa" diff --git a/modules/graphmap2/index/meta.yml b/modules/graphmap2/index/meta.yml index 4ff63276..e7bd6cb6 100644 --- a/modules/graphmap2/index/meta.yml +++ b/modules/graphmap2/index/meta.yml @@ -20,9 +20,9 @@ output: type: file description: Graphmap2 fasta index in gmidx format pattern: "*.gmidx" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@yuukiiwa" diff --git a/modules/gubbins/meta.yml b/modules/gubbins/meta.yml index 1a49b335..84b930a2 100644 --- a/modules/gubbins/meta.yml +++ b/modules/gubbins/meta.yml @@ -16,9 +16,9 @@ input: description: fasta alignment file pattern: "*.{fasta,fas,fa,aln}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - fasta: type: file diff --git a/modules/gunzip/meta.yml b/modules/gunzip/meta.yml index 60911685..dbec5534 100644 --- a/modules/gunzip/meta.yml +++ b/modules/gunzip/meta.yml @@ -18,9 +18,9 @@ output: type: file description: Compressed/uncompressed file pattern: "*.*" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/hifiasm/meta.yml b/modules/hifiasm/meta.yml index c6d5a735..3d4c9548 100644 --- a/modules/hifiasm/meta.yml +++ b/modules/hifiasm/meta.yml @@ -42,9 +42,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - raw_unitigs: type: file diff --git a/modules/hisat2/align/meta.yml b/modules/hisat2/align/meta.yml index 799f1808..6011cc34 100644 --- a/modules/hisat2/align/meta.yml +++ b/modules/hisat2/align/meta.yml @@ -48,9 +48,9 @@ output: type: file description: Aligment log pattern: "*.log" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/hisat2/build/meta.yml b/modules/hisat2/build/meta.yml index 1d3fc7e6..c08b296d 100644 --- a/modules/hisat2/build/meta.yml +++ b/modules/hisat2/build/meta.yml @@ -29,9 +29,9 @@ input: pattern: "*.{txt}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - index: type: file diff --git a/modules/hisat2/extractsplicesites/meta.yml b/modules/hisat2/extractsplicesites/meta.yml index 3befc4dd..97227faf 100644 --- a/modules/hisat2/extractsplicesites/meta.yml +++ b/modules/hisat2/extractsplicesites/meta.yml @@ -21,9 +21,9 @@ input: pattern: "*.{gtf}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - splicesites: type: file diff --git a/modules/hmmer/hmmalign/meta.yml b/modules/hmmer/hmmalign/meta.yml index 60020b32..c9a50bc2 100644 --- a/modules/hmmer/hmmalign/meta.yml +++ b/modules/hmmer/hmmalign/meta.yml @@ -32,9 +32,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - sthlm: type: file diff --git a/modules/homer/annotatepeaks/meta.yml b/modules/homer/annotatepeaks/meta.yml index f311741b..39fe4197 100644 --- a/modules/homer/annotatepeaks/meta.yml +++ b/modules/homer/annotatepeaks/meta.yml @@ -38,9 +38,9 @@ output: type: file description: The annotated peaks pattern: "*annotatePeaks.txt" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/homer/findpeaks/meta.yml b/modules/homer/findpeaks/meta.yml index 51932688..d1450f3c 100644 --- a/modules/homer/findpeaks/meta.yml +++ b/modules/homer/findpeaks/meta.yml @@ -29,9 +29,9 @@ output: type: file description: The found peaks pattern: "*peaks.txt" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@EMiller88" diff --git a/modules/homer/maketagdirectory/meta.yml b/modules/homer/maketagdirectory/meta.yml index 7a35857b..9a88c2e1 100644 --- a/modules/homer/maketagdirectory/meta.yml +++ b/modules/homer/maketagdirectory/meta.yml @@ -33,9 +33,9 @@ output: type: directory description: The "Tag Directory" pattern: "tag_dir" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@EMiller88" diff --git a/modules/homer/makeucscfile/meta.yml b/modules/homer/makeucscfile/meta.yml index e63e979a..d9123c7e 100644 --- a/modules/homer/makeucscfile/meta.yml +++ b/modules/homer/makeucscfile/meta.yml @@ -30,9 +30,9 @@ output: type: file description: The UCSC bed graph pattern: "tag_dir/*ucsc.bedGraph.gz" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@EMiller88" diff --git a/modules/iqtree/meta.yml b/modules/iqtree/meta.yml index 426ad0cf..0a3b4e4c 100644 --- a/modules/iqtree/meta.yml +++ b/modules/iqtree/meta.yml @@ -20,9 +20,9 @@ input: pattern: "*.{fasta,fas,fa,mfa}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - phylogeny: type: file diff --git a/modules/ivar/consensus/meta.yml b/modules/ivar/consensus/meta.yml index 2a95c51c..389e5fe6 100644 --- a/modules/ivar/consensus/meta.yml +++ b/modules/ivar/consensus/meta.yml @@ -42,9 +42,9 @@ output: type: file description: mpileup output from samtools mpileup [OPTIONAL] pattern: "*.mpileup" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@andersgs" diff --git a/modules/ivar/trim/meta.yml b/modules/ivar/trim/meta.yml index 762a9fe9..4798c25f 100644 --- a/modules/ivar/trim/meta.yml +++ b/modules/ivar/trim/meta.yml @@ -42,9 +42,9 @@ output: type: file description: Log file generated by iVar for use with MultiQC pattern: "*.log" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@andersgs" diff --git a/modules/ivar/variants/meta.yml 
b/modules/ivar/variants/meta.yml index 37eb9133..a689ffeb 100644 --- a/modules/ivar/variants/meta.yml +++ b/modules/ivar/variants/meta.yml @@ -42,9 +42,9 @@ output: type: file description: mpileup output from samtools mpileup [OPTIONAL] pattern: "*.mpileup" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@andersgs" diff --git a/modules/kallisto/index/meta.yml b/modules/kallisto/index/meta.yml index 6080eb77..a4fb08c3 100644 --- a/modules/kallisto/index/meta.yml +++ b/modules/kallisto/index/meta.yml @@ -18,9 +18,9 @@ input: pattern: "*.{fasta}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - idx: type: index diff --git a/modules/kallistobustools/count/meta.yml b/modules/kallistobustools/count/meta.yml index 41cf91a0..bc2433bb 100644 --- a/modules/kallistobustools/count/meta.yml +++ b/modules/kallistobustools/count/meta.yml @@ -58,9 +58,9 @@ output: type: file description: kb count output folder pattern: "*.{count}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/kallistobustools/ref/meta.yml b/modules/kallistobustools/ref/meta.yml index b9f50f20..353b9c11 100644 --- a/modules/kallistobustools/ref/meta.yml +++ b/modules/kallistobustools/ref/meta.yml @@ -27,9 +27,9 @@ input: pattern: "{standard,lamanno,nucleus}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - kb_ref_idx: type: file diff --git a/modules/kleborate/meta.yml b/modules/kleborate/meta.yml index 0394a626..eaf837e7 100644 --- a/modules/kleborate/meta.yml +++ b/modules/kleborate/meta.yml @@ -29,9 +29,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - txt: type: file diff --git a/modules/kraken2/kraken2/meta.yml b/modules/kraken2/kraken2/meta.yml index 3996fbc0..5b849c3e 100644 --- a/modules/kraken2/kraken2/meta.yml +++ b/modules/kraken2/kraken2/meta.yml @@ -50,9 +50,9 @@ output: Kraken2 report containing stats about classified and not classifed reads. pattern: "*.{report.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/last/dotplot/meta.yml b/modules/last/dotplot/meta.yml index fa092b4c..2ec94f58 100644 --- a/modules/last/dotplot/meta.yml +++ b/modules/last/dotplot/meta.yml @@ -38,9 +38,9 @@ output: type: file description: Pairwise alignment dot plot image, in GIF format. pattern: "*.gif" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/last/lastal/meta.yml b/modules/last/lastal/meta.yml index 1f8fde9c..94e76878 100644 --- a/modules/last/lastal/meta.yml +++ b/modules/last/lastal/meta.yml @@ -39,9 +39,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - maf: type: file diff --git a/modules/last/lastdb/meta.yml b/modules/last/lastdb/meta.yml index cddbc29c..e576fa18 100644 --- a/modules/last/lastdb/meta.yml +++ b/modules/last/lastdb/meta.yml @@ -28,9 +28,9 @@ input: pattern: "*.{fasta,fasta.gz,fastq,fastq.gz}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - index: type: directory diff --git a/modules/last/mafconvert/meta.yml b/modules/last/mafconvert/meta.yml index f0912ccd..3336f315 100644 --- a/modules/last/mafconvert/meta.yml +++ b/modules/last/mafconvert/meta.yml @@ -34,9 +34,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - axt_gz: type: file diff --git a/modules/last/mafswap/meta.yml b/modules/last/mafswap/meta.yml index 8821ab47..ce97fe97 100644 --- a/modules/last/mafswap/meta.yml +++ b/modules/last/mafswap/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Multiple Aligment Format (MAF) file, compressed with gzip pattern: "*.{maf.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/last/postmask/meta.yml b/modules/last/postmask/meta.yml index d3a184eb..02e602f6 100644 --- a/modules/last/postmask/meta.yml +++ b/modules/last/postmask/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Multiple Aligment Format (MAF) file, compressed with gzip pattern: "*.{maf.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/last/split/meta.yml b/modules/last/split/meta.yml index 7b11bcd5..bc16fe9a 100644 --- a/modules/last/split/meta.yml +++ b/modules/last/split/meta.yml @@ -32,9 +32,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - maf: type: file diff --git a/modules/last/train/meta.yml b/modules/last/train/meta.yml index 820e4bc8..20c5780d 100644 --- a/modules/last/train/meta.yml +++ b/modules/last/train/meta.yml @@ -35,9 +35,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - param_file: type: file diff --git a/modules/lima/meta.yml b/modules/lima/meta.yml index 3bb861b5..d77246c6 100644 --- a/modules/lima/meta.yml +++ b/modules/lima/meta.yml @@ -68,10 +68,10 @@ output: type: file description: This file shows how many ZMWs have been filtered, how ZMWs many are same/different, and how many reads have been filtered. 
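Taken together, the version -> versions renames in these meta.yml files document one module-side convention: every process emits a single "versions.yml" file instead of the older per-tool "*.version.txt". As a hedged illustration (not one of the modules patched here), the stripped-down DSL2 process below shows the shape that the renamed meta.yml entries describe; FOO, the bioconda pin and the foo --version call are placeholders, and the getProcessName/getSoftwareName helpers are assumed to come from the module-local functions.nf that the real modules include.

process FOO {
    tag "$meta.id"
    label 'process_low'

    conda (params.enable_conda ? "bioconda::foo=1.0.0" : null)   // placeholder pin, not a real package

    input:
    tuple val(meta), path(reads)

    output:
    tuple val(meta), path("*.txt"), emit: txt
    path "versions.yml", emit: versions   // documented as `versions` in meta.yml

    script:
    """
    foo $reads > ${meta.id}.txt

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$(echo \$(foo --version 2>&1) | sed 's/^foo v//')
    END_VERSIONS
    """
}

The corresponding meta.yml entry is then exactly the block these patches introduce: a versions output of type file with pattern "versions.yml".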
pattern: "*.summary" - - version: + - versions: type: file - description: File containing software version - pattern: "*.{version.txt}" + description: File containing software versions + pattern: "versions.yml" authors: - "@sguizard" diff --git a/modules/lofreq/call/meta.yml b/modules/lofreq/call/meta.yml index 16d23cd9..97607663 100644 --- a/modules/lofreq/call/meta.yml +++ b/modules/lofreq/call/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - vcf: type: file diff --git a/modules/lofreq/callparallel/meta.yml b/modules/lofreq/callparallel/meta.yml index 15257180..a7dbd637 100644 --- a/modules/lofreq/callparallel/meta.yml +++ b/modules/lofreq/callparallel/meta.yml @@ -40,9 +40,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - vcf: type: file diff --git a/modules/lofreq/filter/meta.yml b/modules/lofreq/filter/meta.yml index 9aa92da7..fceee6f5 100644 --- a/modules/lofreq/filter/meta.yml +++ b/modules/lofreq/filter/meta.yml @@ -31,9 +31,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - vcf: type: file diff --git a/modules/lofreq/indelqual/meta.yml b/modules/lofreq/indelqual/meta.yml index 34f296d7..a6ec7dc2 100644 --- a/modules/lofreq/indelqual/meta.yml +++ b/modules/lofreq/indelqual/meta.yml @@ -31,9 +31,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/malt/build/meta.yml b/modules/malt/build/meta.yml index f1668b94..9985d834 100644 --- a/modules/malt/build/meta.yml +++ b/modules/malt/build/meta.yml @@ -38,9 +38,9 @@ input: pattern: output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - index: type: directory diff --git a/modules/malt/run/meta.yml b/modules/malt/run/meta.yml index 3ad78622..740ab8a5 100644 --- a/modules/malt/run/meta.yml +++ b/modules/malt/run/meta.yml @@ -32,9 +32,9 @@ input: description: Index/database directory from malt-build pattern: '*/' output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - rma6: type: file diff --git a/modules/maltextract/meta.yml b/modules/maltextract/meta.yml index 29271753..8f257100 100644 --- a/modules/maltextract/meta.yml +++ b/modules/maltextract/meta.yml @@ -38,9 +38,9 @@ input: pattern: "${ncbi_dir}/" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - results: type: directory diff --git a/modules/mash/sketch/main.nf b/modules/mash/sketch/main.nf index 7a99cc50..f434a5f1 100644 --- a/modules/mash/sketch/main.nf +++ b/modules/mash/sketch/main.nf @@ -34,6 +34,7 @@ process MASH_SKETCH { -o ${prefix} \\ -r $reads \\ 2> ${prefix}.mash_stats + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(mash --version 2>&1) diff --git a/modules/mash/sketch/meta.yml b/modules/mash/sketch/meta.yml index 3c8c714e..fba0e000 100644 --- a/modules/mash/sketch/meta.yml +++ b/modules/mash/sketch/meta.yml @@ -27,7 +27,7 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - mash: type: file description: Sketch output pattern: "*.{mash}" @@ -35,6 +35,10 @@ output: type: file description: Sketch statistics pattern: "*.{mash_stats}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@thanhleviet" diff --git a/modules/metaphlan3/meta.yml b/modules/metaphlan3/meta.yml index d9f9f520..0d3c6f85 100644 --- a/modules/metaphlan3/meta.yml +++ b/modules/metaphlan3/meta.yml @@ -31,9 +31,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - profile: type: file diff --git a/modules/methyldackel/extract/meta.yml b/modules/methyldackel/extract/meta.yml index 7219bb81..6c87f7c9 100644 --- a/modules/methyldackel/extract/meta.yml +++ b/modules/methyldackel/extract/meta.yml @@ -49,9 +49,9 @@ output: type: file description: bedGraph file containing per-base methylation metrics pattern: "*.{bedGraph}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/methyldackel/mbias/meta.yml b/modules/methyldackel/mbias/meta.yml index a6f58d09..4bc8f016 100644 --- a/modules/methyldackel/mbias/meta.yml +++ b/modules/methyldackel/mbias/meta.yml @@ -50,9 +50,9 @@ output: type: file description: Text file containing methylation bias pattern: "*.{txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/minia/meta.yml b/modules/minia/meta.yml index 638cc3ad..255bcc20 100644 --- a/modules/minia/meta.yml +++ b/modules/minia/meta.yml @@ -37,9 +37,9 @@ output: type: file description: Minia output h5 file pattern: "*{.h5}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/minimap2/align/meta.yml b/modules/minimap2/align/meta.yml index 1cb20473..35ed411b 100644 --- a/modules/minimap2/align/meta.yml +++ b/modules/minimap2/align/meta.yml @@ -38,9 +38,9 @@ output: type: file description: Alignment in PAF format pattern: "*.paf" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/minimap2/index/meta.yml b/modules/minimap2/index/meta.yml index c1c43c70..e8450add 100644 --- a/modules/minimap2/index/meta.yml +++ b/modules/minimap2/index/meta.yml @@ -20,9 +20,9 @@ output: type: file description: Minimap2 fasta index. pattern: "*.mmi" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@yuukiiwa" diff --git a/modules/mosdepth/meta.yml b/modules/mosdepth/meta.yml index 4c0be86c..5627c268 100644 --- a/modules/mosdepth/meta.yml +++ b/modules/mosdepth/meta.yml @@ -67,9 +67,9 @@ output: type: file description: Index file for BED file with per-region coverage pattern: "*.{regions.bed.gz.csi}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/msisensor/msi/meta.yml b/modules/msisensor/msi/meta.yml index c01f74e0..e3f13e2e 100644 --- a/modules/msisensor/msi/meta.yml +++ b/modules/msisensor/msi/meta.yml @@ -45,9 +45,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - txt: type: file diff --git a/modules/msisensor/scan/meta.yml b/modules/msisensor/scan/meta.yml index 940b53a5..4900f8cc 100644 --- a/modules/msisensor/scan/meta.yml +++ b/modules/msisensor/scan/meta.yml @@ -29,9 +29,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - txt: type: file diff --git a/modules/multiqc/meta.yml b/modules/multiqc/meta.yml index 2d99ec0d..a54f95ac 100644 --- a/modules/multiqc/meta.yml +++ b/modules/multiqc/meta.yml @@ -29,9 +29,9 @@ output: type: file description: Plots created by MultiQC pattern: "*_data" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/muscle/meta.yml b/modules/muscle/meta.yml index 845a8284..d28afa72 100644 --- a/modules/muscle/meta.yml +++ b/modules/muscle/meta.yml @@ -48,9 +48,9 @@ output: type: file description: Log file of MUSCLE run pattern: "*{.log}" - - version: + - versions: type: file - description: File containing MUSCLE software version + description: File containing software versions pattern: "versions.yml" authors: - "@MGordon" diff --git a/modules/nanolyse/meta.yml b/modules/nanolyse/meta.yml index 2411d33d..c59607fa 100644 --- a/modules/nanolyse/meta.yml +++ b/modules/nanolyse/meta.yml @@ -38,9 +38,9 @@ output: type: file description: Log of the Nanolyse run. pattern: "*.log" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@yuukiiwa" diff --git a/modules/nanoplot/meta.yml b/modules/nanoplot/meta.yml index cf897eb9..0527624f 100644 --- a/modules/nanoplot/meta.yml +++ b/modules/nanoplot/meta.yml @@ -49,9 +49,9 @@ output: type: file description: log file of NanoPlot run pattern: "*{.log}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/nextclade/meta.yml b/modules/nextclade/meta.yml index 730b0fa4..1b4a435a 100755 --- a/modules/nextclade/meta.yml +++ b/modules/nextclade/meta.yml @@ -30,9 +30,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - csv: type: file diff --git a/modules/optitype/meta.yml b/modules/optitype/meta.yml index 02e5cec1..15912125 100644 --- a/modules/optitype/meta.yml +++ b/modules/optitype/meta.yml @@ -29,9 +29,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', seq_type:'DNA' ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - output: type: file diff --git a/modules/pairix/meta.yml b/modules/pairix/meta.yml index 3c43541a..45577065 100644 --- a/modules/pairix/meta.yml +++ b/modules/pairix/meta.yml @@ -29,9 +29,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - index: type: file diff --git a/modules/pairtools/dedup/meta.yml b/modules/pairtools/dedup/meta.yml index d5a8ae87..288b421e 100644 --- a/modules/pairtools/dedup/meta.yml +++ b/modules/pairtools/dedup/meta.yml @@ -27,9 +27,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - pairs: type: file diff --git a/modules/pairtools/flip/meta.yml b/modules/pairtools/flip/meta.yml index 981e3828..0d7aa082 100644 --- a/modules/pairtools/flip/meta.yml +++ b/modules/pairtools/flip/meta.yml @@ -30,9 +30,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - flip: type: file diff --git a/modules/pairtools/parse/meta.yml b/modules/pairtools/parse/meta.yml index 940fe5d1..8c9c30dc 100644 --- a/modules/pairtools/parse/meta.yml +++ b/modules/pairtools/parse/meta.yml @@ -31,9 +31,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - pairsam: type: file diff --git a/modules/pairtools/restrict/meta.yml b/modules/pairtools/restrict/meta.yml index 9dfb8f76..0ab3b420 100644 --- a/modules/pairtools/restrict/meta.yml +++ b/modules/pairtools/restrict/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - restrict: type: file diff --git a/modules/pairtools/select/meta.yml b/modules/pairtools/select/meta.yml index 18e97e99..5e45129b 100644 --- a/modules/pairtools/select/meta.yml +++ b/modules/pairtools/select/meta.yml @@ -27,9 +27,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - selected: type: file diff --git a/modules/pairtools/sort/meta.yml b/modules/pairtools/sort/meta.yml index 6f36323c..6db2f9e2 100644 --- a/modules/pairtools/sort/meta.yml +++ b/modules/pairtools/sort/meta.yml @@ -27,9 +27,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - sorted: type: file diff --git a/modules/pangolin/meta.yml b/modules/pangolin/meta.yml index b1b583e9..29878ef0 100644 --- a/modules/pangolin/meta.yml +++ b/modules/pangolin/meta.yml @@ -24,9 +24,9 @@ output: type: file description: Pangolin lineage report pattern: "*.{csv}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@kevinmenden" diff --git a/modules/pbccs/meta.yml b/modules/pbccs/meta.yml index eb89d628..b476c829 100644 --- a/modules/pbccs/meta.yml +++ b/modules/pbccs/meta.yml @@ -38,9 +38,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - css: type: file diff --git a/modules/picard/collectmultiplemetrics/meta.yml b/modules/picard/collectmultiplemetrics/meta.yml index a588fd98..587983a1 100644 --- a/modules/picard/collectmultiplemetrics/meta.yml +++ b/modules/picard/collectmultiplemetrics/meta.yml @@ -41,9 +41,9 @@ output: type: file description: PDF plots of metrics pattern: "*.{pdf}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/picard/collectwgsmetrics/meta.yml b/modules/picard/collectwgsmetrics/meta.yml index ec828af5..7ae2d41d 100644 --- a/modules/picard/collectwgsmetrics/meta.yml +++ b/modules/picard/collectwgsmetrics/meta.yml @@ -36,9 +36,9 @@ output: type: file description: Alignment metrics files generated by picard pattern: "*_{metrics}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/picard/filtersamreads/meta.yml b/modules/picard/filtersamreads/meta.yml index 82f78065..d63ebcf0 100644 --- a/modules/picard/filtersamreads/meta.yml +++ b/modules/picard/filtersamreads/meta.yml @@ -42,9 +42,9 @@ output: type: file description: Filtered BAM file pattern: "*.{bam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/picard/markduplicates/meta.yml b/modules/picard/markduplicates/meta.yml index db72b5c5..13f2d350 100644 --- a/modules/picard/markduplicates/meta.yml +++ b/modules/picard/markduplicates/meta.yml @@ -42,9 +42,9 @@ output: type: file description: Duplicate metrics file generated by picard pattern: "*.{metrics.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/picard/mergesamfiles/meta.yml b/modules/picard/mergesamfiles/meta.yml index 82ba2a43..f732daf4 100644 --- a/modules/picard/mergesamfiles/meta.yml +++ b/modules/picard/mergesamfiles/meta.yml @@ -32,9 +32,9 @@ output: type: file description: Merged BAM file pattern: "*.{bam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software 
versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/picard/sortsam/meta.yml b/modules/picard/sortsam/meta.yml index 37d12b91..3e0fb450 100644 --- a/modules/picard/sortsam/meta.yml +++ b/modules/picard/sortsam/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/plasmidid/meta.yml b/modules/plasmidid/meta.yml index a2689ddf..8cde23c5 100644 --- a/modules/plasmidid/meta.yml +++ b/modules/plasmidid/meta.yml @@ -66,9 +66,9 @@ output: type: directory description: Directory containing the kmer files produced by plasmidid pattern: "database" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/plink/vcf/meta.yml b/modules/plink/vcf/meta.yml index 146a0030..d39892b7 100644 --- a/modules/plink/vcf/meta.yml +++ b/modules/plink/vcf/meta.yml @@ -31,9 +31,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bed: type: file diff --git a/modules/preseq/lcextrap/meta.yml b/modules/preseq/lcextrap/meta.yml index 616d8243..bdc61228 100755 --- a/modules/preseq/lcextrap/meta.yml +++ b/modules/preseq/lcextrap/meta.yml @@ -30,9 +30,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - ccurve: type: file diff --git a/modules/prodigal/meta.yml b/modules/prodigal/meta.yml index c24ca4a3..5bcc4e77 100644 --- a/modules/prodigal/meta.yml +++ b/modules/prodigal/meta.yml @@ -28,9 +28,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/prokka/meta.yml b/modules/prokka/meta.yml index 26fb767a..87446694 100644 --- a/modules/prokka/meta.yml +++ b/modules/prokka/meta.yml @@ -34,9 +34,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - gff: type: file diff --git a/modules/pycoqc/meta.yml b/modules/pycoqc/meta.yml index 32012e83..33bd6b07 100644 --- a/modules/pycoqc/meta.yml +++ b/modules/pycoqc/meta.yml @@ -38,9 +38,9 @@ output: type: file description: Results in JSON format pattern: "*.{json}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/pydamage/analyze/meta.yml b/modules/pydamage/analyze/meta.yml index 7369a3a3..918fbce9 100644 --- a/modules/pydamage/analyze/meta.yml +++ b/modules/pydamage/analyze/meta.yml @@ -42,9 +42,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - csv: type: file diff --git a/modules/pydamage/filter/meta.yml b/modules/pydamage/filter/meta.yml index 29d4642b..706e38b0 100644 --- a/modules/pydamage/filter/meta.yml +++ b/modules/pydamage/filter/meta.yml @@ -38,9 +38,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - csv: type: file diff --git a/modules/qcat/meta.yml b/modules/qcat/meta.yml index 5946eaa8..938bc337 100644 --- a/modules/qcat/meta.yml +++ b/modules/qcat/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Demultiplexed fastq samples pattern: "*.fastq.gz" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@yuukiiwa" diff --git a/modules/qualimap/bamqc/meta.yml b/modules/qualimap/bamqc/meta.yml index 74c3ffdf..cc0471fc 100644 --- a/modules/qualimap/bamqc/meta.yml +++ b/modules/qualimap/bamqc/meta.yml @@ -41,9 +41,9 @@ output: type: dir description: Qualimap results dir pattern: "*/*" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@phue" diff --git a/modules/quast/meta.yml b/modules/quast/meta.yml index 742dc8f0..8b692e9e 100644 --- a/modules/quast/meta.yml +++ b/modules/quast/meta.yml @@ -36,9 +36,9 @@ output: pattern: "{prefix}.lineage_report.csv" - report: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/rapidnj/meta.yml b/modules/rapidnj/meta.yml index 7f7da9b9..ead54e09 100644 --- a/modules/rapidnj/meta.yml +++ b/modules/rapidnj/meta.yml @@ -20,9 +20,9 @@ input: pattern: "*.{fasta,fas,fa,mfa}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - phylogeny: type: file diff --git a/modules/rasusa/meta.yml b/modules/rasusa/meta.yml index 61cdbe0c..610afd3f 100644 --- a/modules/rasusa/meta.yml +++ b/modules/rasusa/meta.yml @@ -35,9 +35,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - reads: type: file diff --git a/modules/raxmlng/meta.yml b/modules/raxmlng/meta.yml index d5f755c5..3cc558f4 100644 --- a/modules/raxmlng/meta.yml +++ b/modules/raxmlng/meta.yml @@ -20,9 +20,9 @@ input: pattern: "*.{fasta,fas,fa,mfa}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - phylogeny: type: file diff --git a/modules/rsem/calculateexpression/meta.yml b/modules/rsem/calculateexpression/meta.yml index 079751d3..e2fb8f6d 100644 --- a/modules/rsem/calculateexpression/meta.yml +++ b/modules/rsem/calculateexpression/meta.yml @@ -42,9 +42,9 @@ output: type: file description: RSEM logs pattern: "*.log" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam_star: type: file diff --git a/modules/rsem/preparereference/meta.yml b/modules/rsem/preparereference/meta.yml index 5ccca28a..94f7cc05 100644 --- a/modules/rsem/preparereference/meta.yml +++ b/modules/rsem/preparereference/meta.yml @@ -28,9 +28,9 @@ output: type: file description: Fasta file of transcripts pattern: "rsem/*transcripts.fa" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/rseqc/bamstat/meta.yml b/modules/rseqc/bamstat/meta.yml index adb81c1c..64a0b9e4 100644 --- a/modules/rseqc/bamstat/meta.yml +++ b/modules/rseqc/bamstat/meta.yml @@ -27,9 +27,9 @@ output: type: file description: bam statistics report pattern: "*.bam_stat.txt" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/rseqc/inferexperiment/meta.yml b/modules/rseqc/inferexperiment/meta.yml index f89f90d1..63710d7b 100644 --- a/modules/rseqc/inferexperiment/meta.yml +++ b/modules/rseqc/inferexperiment/meta.yml @@ -30,9 +30,9 @@ output: type: file description: infer_experiment results report pattern: "*.infer_experiment.txt" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/rseqc/innerdistance/meta.yml b/modules/rseqc/innerdistance/meta.yml index 5b2b5e79..7eea1350 100644 --- a/modules/rseqc/innerdistance/meta.yml +++ b/modules/rseqc/innerdistance/meta.yml @@ -46,9 +46,9 @@ output: type: file description: script to reproduce the plot pattern: "*.inner_distance_plot.R" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/rseqc/junctionannotation/meta.yml b/modules/rseqc/junctionannotation/meta.yml index d96e7756..5562b0b7 100644 --- a/modules/rseqc/junctionannotation/meta.yml +++ b/modules/rseqc/junctionannotation/meta.yml @@ -52,9 +52,9 @@ output: description: Rscript to reproduce the plots pattern: "*.r" - log: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: 
"versions.yml" authors: - "@drpatelh" diff --git a/modules/rseqc/junctionsaturation/meta.yml b/modules/rseqc/junctionsaturation/meta.yml index aaf44cdc..ffa359ab 100644 --- a/modules/rseqc/junctionsaturation/meta.yml +++ b/modules/rseqc/junctionsaturation/meta.yml @@ -35,9 +35,9 @@ output: type: file description: Junction saturation R-script pattern: "*.r" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/rseqc/readdistribution/meta.yml b/modules/rseqc/readdistribution/meta.yml index 7ffab04f..d12ad600 100644 --- a/modules/rseqc/readdistribution/meta.yml +++ b/modules/rseqc/readdistribution/meta.yml @@ -31,9 +31,9 @@ output: type: file description: the read distribution report pattern: "*.read_distribution.txt" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/rseqc/readduplication/meta.yml b/modules/rseqc/readduplication/meta.yml index efc48c0d..98d25ea4 100644 --- a/modules/rseqc/readduplication/meta.yml +++ b/modules/rseqc/readduplication/meta.yml @@ -42,9 +42,9 @@ output: type: file description: script to reproduce the plot pattern: "*.R" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/salmon/index/meta.yml b/modules/salmon/index/meta.yml index 4d16b359..c956f15c 100644 --- a/modules/salmon/index/meta.yml +++ b/modules/salmon/index/meta.yml @@ -25,9 +25,9 @@ output: type: directory description: Folder containing the star index files pattern: "salmon" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/salmon/quant/meta.yml b/modules/salmon/quant/meta.yml index 981df89e..47e81229 100644 --- a/modules/salmon/quant/meta.yml +++ b/modules/salmon/quant/meta.yml @@ -45,9 +45,9 @@ output: type: directory description: Folder containing the quantification results for a specific sample pattern: "${prefix}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/samtools/ampliconclip/meta.yml b/modules/samtools/ampliconclip/meta.yml index 2ecbf463..7aa8c6bd 100644 --- a/modules/samtools/ampliconclip/meta.yml +++ b/modules/samtools/ampliconclip/meta.yml @@ -43,9 +43,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/samtools/faidx/meta.yml b/modules/samtools/faidx/meta.yml index 77d21861..6e63b671 100644 --- a/modules/samtools/faidx/meta.yml +++ b/modules/samtools/faidx/meta.yml @@ -22,9 +22,9 @@ output: type: file description: FASTA index file pattern: "*.{fai}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/fastq/meta.yml b/modules/samtools/fastq/meta.yml index 7c4cc488..9a45886b 100644 --- a/modules/samtools/fastq/meta.yml +++ b/modules/samtools/fastq/meta.yml @@ -34,9 +34,9 @@ output: type: file description: compressed FASTQ file pattern: "*.fastq.gz" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@suzannejin" diff --git a/modules/samtools/flagstat/meta.yml b/modules/samtools/flagstat/meta.yml index d40e45b4..d408cb76 100644 --- a/modules/samtools/flagstat/meta.yml +++ b/modules/samtools/flagstat/meta.yml @@ -40,9 +40,9 @@ output: type: file description: File containing samtools flagstat output pattern: "*.{flagstat}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/idxstats/meta.yml b/modules/samtools/idxstats/meta.yml index 93e8f694..f4cb613f 100644 --- a/modules/samtools/idxstats/meta.yml +++ b/modules/samtools/idxstats/meta.yml @@ -41,9 +41,9 @@ output: type: file description: File containing samtools idxstats output pattern: "*.{idxstats}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/index/meta.yml b/modules/samtools/index/meta.yml index 6f7dc887..5f4dd3fb 100644 --- a/modules/samtools/index/meta.yml +++ b/modules/samtools/index/meta.yml @@ -38,9 +38,9 @@ output: type: file description: CSI index file pattern: "*.{csi}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/merge/meta.yml b/modules/samtools/merge/meta.yml index c5f15a14..1903cdaa 100644 --- a/modules/samtools/merge/meta.yml +++ b/modules/samtools/merge/meta.yml @@ -34,9 +34,9 @@ output: type: file description: BAM file pattern: "*.{bam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/mpileup/meta.yml b/modules/samtools/mpileup/meta.yml index aa0ccc6d..ce55643a 100644 --- a/modules/samtools/mpileup/meta.yml +++ b/modules/samtools/mpileup/meta.yml @@ -38,9 +38,9 @@ output: type: file description: mpileup file pattern: "*.{mpileup}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/sort/meta.yml 
b/modules/samtools/sort/meta.yml index d4f70a8e..cd47c86d 100644 --- a/modules/samtools/sort/meta.yml +++ b/modules/samtools/sort/meta.yml @@ -34,9 +34,9 @@ output: type: file description: Sorted BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/stats/meta.yml b/modules/samtools/stats/meta.yml index 1c7dcc8b..d75d73e2 100644 --- a/modules/samtools/stats/meta.yml +++ b/modules/samtools/stats/meta.yml @@ -39,9 +39,9 @@ output: type: file description: File containing samtools stats output pattern: "*.{stats}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/samtools/view/meta.yml b/modules/samtools/view/meta.yml index 6388f9bc..2e66e7cd 100644 --- a/modules/samtools/view/meta.yml +++ b/modules/samtools/view/meta.yml @@ -34,9 +34,9 @@ output: type: file description: filtered/converted BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/seacr/callpeak/meta.yml b/modules/seacr/callpeak/meta.yml index 80da69e4..43044c2f 100644 --- a/modules/seacr/callpeak/meta.yml +++ b/modules/seacr/callpeak/meta.yml @@ -40,9 +40,9 @@ output: type: file description: Bed file containing the calculated peaks. pattern: "*.bed" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@chris-cheshire" diff --git a/modules/seqkit/split2/meta.yml b/modules/seqkit/split2/meta.yml index 5dfee7f9..beb676ea 100644 --- a/modules/seqkit/split2/meta.yml +++ b/modules/seqkit/split2/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Split fastq files pattern: "*.{fq.gz/fastq.gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@FriederikeHanssen" diff --git a/modules/seqtk/sample/meta.yml b/modules/seqtk/sample/meta.yml index b9422433..6cc4d657 100644 --- a/modules/seqtk/sample/meta.yml +++ b/modules/seqtk/sample/meta.yml @@ -30,9 +30,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - reads: type: file diff --git a/modules/seqtk/subseq/meta.yml b/modules/seqtk/subseq/meta.yml index f7c6c624..0d9a802b 100644 --- a/modules/seqtk/subseq/meta.yml +++ b/modules/seqtk/subseq/meta.yml @@ -21,9 +21,9 @@ input: pattern: "*.{bed,lst}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - sequences: type: file diff --git a/modules/sequenzautils/bam2seqz/meta.yml b/modules/sequenzautils/bam2seqz/meta.yml index 2ce4ab7f..278f9750 100755 --- a/modules/sequenzautils/bam2seqz/meta.yml +++ b/modules/sequenzautils/bam2seqz/meta.yml @@ -37,9 +37,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - seqz: type: file diff --git a/modules/sequenzautils/gcwiggle/meta.yml b/modules/sequenzautils/gcwiggle/meta.yml index 35daa498..4ecba04a 100644 --- a/modules/sequenzautils/gcwiggle/meta.yml +++ b/modules/sequenzautils/gcwiggle/meta.yml @@ -25,9 +25,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - wig: type: file diff --git a/modules/seqwish/induce/meta.yml b/modules/seqwish/induce/meta.yml index c2836824..c5f9d4c7 100644 --- a/modules/seqwish/induce/meta.yml +++ b/modules/seqwish/induce/meta.yml @@ -38,9 +38,9 @@ output: type: file description: Variation graph in GFA 1.0 format pattern: "*.{gfa}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" diff --git a/modules/shovill/meta.yml b/modules/shovill/meta.yml index b878f93d..1f3c3a8f 100644 --- a/modules/shovill/meta.yml +++ b/modules/shovill/meta.yml @@ -28,9 +28,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - contigs: type: file diff --git a/modules/snpdists/meta.yml b/modules/snpdists/meta.yml index e86e3092..bf4366ff 100644 --- a/modules/snpdists/meta.yml +++ b/modules/snpdists/meta.yml @@ -33,9 +33,9 @@ output: type: file description: The output TSV file containing SNP distance matrix pattern: "*.tsv" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@abhi18av" diff --git a/modules/snpeff/meta.yml b/modules/snpeff/meta.yml index aa21e2bc..ba049c0e 100644 --- a/modules/snpeff/meta.yml +++ b/modules/snpeff/meta.yml @@ -49,9 +49,9 @@ output: type: file description: snpEff report file pattern: "*.html" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/snpsites/meta.yml b/modules/snpsites/meta.yml index ae250e5f..5361aa3e 100644 --- a/modules/snpsites/meta.yml +++ b/modules/snpsites/meta.yml @@ -15,9 +15,9 @@ input: description: fasta alignment file pattern: "*.{fasta,fas,fa,aln}" output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - fasta: type: file diff --git a/modules/spades/meta.yml b/modules/spades/meta.yml index 38c5c2ae..3d5943ae 100644 --- a/modules/spades/meta.yml +++ b/modules/spades/meta.yml @@ -59,9 +59,9 @@ output: type: file description: | Spades log file - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/staphopiasccmec/meta.yml b/modules/staphopiasccmec/meta.yml index e1ce3a05..006e5389 100644 --- a/modules/staphopiasccmec/meta.yml +++ 
b/modules/staphopiasccmec/meta.yml @@ -31,9 +31,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - tsv: type: file diff --git a/modules/star/align/meta.yml b/modules/star/align/meta.yml index 7f0217ea..00f955dd 100644 --- a/modules/star/align/meta.yml +++ b/modules/star/align/meta.yml @@ -45,9 +45,9 @@ output: type: file description: STAR log progress file pattern: "*Log.progress.out" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam_sorted: type: file diff --git a/modules/star/genomegenerate/meta.yml b/modules/star/genomegenerate/meta.yml index 70525738..09728b58 100644 --- a/modules/star/genomegenerate/meta.yml +++ b/modules/star/genomegenerate/meta.yml @@ -26,9 +26,9 @@ output: type: directory description: Folder containing the star index files pattern: "star" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/strelka/germline/meta.yml b/modules/strelka/germline/meta.yml index 4423e437..3f86b045 100644 --- a/modules/strelka/germline/meta.yml +++ b/modules/strelka/germline/meta.yml @@ -55,9 +55,9 @@ output: type: file description: index file for the genome_vcf file pattern: "*_genome.vcf.gz.tbi" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@arontommi" diff --git a/modules/stringtie/merge/meta.yml b/modules/stringtie/merge/meta.yml index 5752c0a9..81eca6dc 100644 --- a/modules/stringtie/merge/meta.yml +++ b/modules/stringtie/merge/meta.yml @@ -27,5 +27,10 @@ output: description: | Merged gtf from annotation and stringtie output gtfs. 
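Because the renamed output is identical across modules, a pipeline can gather the version reports with one channel operation. The workflow below is a consumer-side sketch under the assumption of two placeholder modules, FOO and BAR, with hypothetical include paths; it shows the usual mix() aggregation pattern rather than code taken from these patches.

include { FOO } from './modules/foo/main' addParams( options: [:] )
include { BAR } from './modules/bar/main' addParams( options: [:] )

workflow COLLECT_VERSIONS {
    take:
    reads        // channel: [ val(meta), path(reads) ]

    main:
    ch_versions = Channel.empty()

    FOO ( reads )
    ch_versions = ch_versions.mix(FOO.out.versions.first())

    BAR ( FOO.out.txt )
    ch_versions = ch_versions.mix(BAR.out.versions.first())

    emit:
    versions = ch_versions    // channel: path(versions.yml), one entry per tool
}

Collecting the per-module versions.yml files this way is what makes the uniform versions name, rather than the ad hoc version outputs being removed above, worth the churn in these diffs.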
pattern: "*.gtf" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + authors: - "@yuukiiwa" diff --git a/modules/stringtie/stringtie/meta.yml b/modules/stringtie/stringtie/meta.yml index f9363009..0074b90f 100644 --- a/modules/stringtie/stringtie/meta.yml +++ b/modules/stringtie/stringtie/meta.yml @@ -48,9 +48,9 @@ output: type: file description: for running ballgown pattern: "*.{ballgown}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/subread/featurecounts/meta.yml b/modules/subread/featurecounts/meta.yml index 504d2f48..1100a091 100644 --- a/modules/subread/featurecounts/meta.yml +++ b/modules/subread/featurecounts/meta.yml @@ -43,9 +43,9 @@ output: type: file description: Summary log file pattern: "*.featureCounts.txt.summary" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: diff --git a/modules/tabix/bgzip/meta.yml b/modules/tabix/bgzip/meta.yml index 801d98bc..0b0787bf 100644 --- a/modules/tabix/bgzip/meta.yml +++ b/modules/tabix/bgzip/meta.yml @@ -30,9 +30,9 @@ output: type: file description: Output compressed file pattern: "*.{gz}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/tabix/bgziptabix/meta.yml b/modules/tabix/bgziptabix/meta.yml index 92f62bf3..5b4cc4e8 100644 --- a/modules/tabix/bgziptabix/meta.yml +++ b/modules/tabix/bgziptabix/meta.yml @@ -36,9 +36,9 @@ output: type: file description: tabix index file pattern: "*.{gz.tbi}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/tabix/tabix/meta.yml b/modules/tabix/tabix/meta.yml index 1ca58bcf..15edf8c3 100644 --- a/modules/tabix/tabix/meta.yml +++ b/modules/tabix/tabix/meta.yml @@ -30,9 +30,9 @@ output: type: file description: tabix index file pattern: "*.{tbi}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/tiddit/sv/meta.yml b/modules/tiddit/sv/meta.yml index 2a351766..4060a450 100644 --- a/modules/tiddit/sv/meta.yml +++ b/modules/tiddit/sv/meta.yml @@ -42,9 +42,9 @@ output: type: file description: tab pattern: "*.{signals.tab}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@maxulysse" diff --git a/modules/trimgalore/meta.yml b/modules/trimgalore/meta.yml index 0c9fc925..7c46bea9 100644 --- a/modules/trimgalore/meta.yml +++ b/modules/trimgalore/meta.yml @@ -48,9 +48,9 @@ output: type: file description: Trim Galore! 
trimming report pattern: "*_{report.txt}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@drpatelh" diff --git a/modules/ucsc/bed12tobigbed/meta.yml b/modules/ucsc/bed12tobigbed/meta.yml index 9bd2dd46..f3e8a604 100755 --- a/modules/ucsc/bed12tobigbed/meta.yml +++ b/modules/ucsc/bed12tobigbed/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bigbed: type: file diff --git a/modules/ucsc/bedclip/meta.yml b/modules/ucsc/bedclip/meta.yml index b11d2083..c7372925 100755 --- a/modules/ucsc/bedclip/meta.yml +++ b/modules/ucsc/bedclip/meta.yml @@ -28,9 +28,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bedgraph: type: file diff --git a/modules/ucsc/bedgraphtobigwig/meta.yml b/modules/ucsc/bedgraphtobigwig/meta.yml index ea20604c..1be1a3b7 100755 --- a/modules/ucsc/bedgraphtobigwig/meta.yml +++ b/modules/ucsc/bedgraphtobigwig/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bigwig: type: file diff --git a/modules/ucsc/bigwigaverageoverbed/meta.yml b/modules/ucsc/bigwigaverageoverbed/meta.yml index 93328df0..c2b31f88 100644 --- a/modules/ucsc/bigwigaverageoverbed/meta.yml +++ b/modules/ucsc/bigwigaverageoverbed/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - tab: type: file diff --git a/modules/ucsc/wigtobigwig/meta.yml b/modules/ucsc/wigtobigwig/meta.yml index 102fd8ef..4723ff2b 100644 --- a/modules/ucsc/wigtobigwig/meta.yml +++ b/modules/ucsc/wigtobigwig/meta.yml @@ -24,9 +24,9 @@ input: description: chromosome sizes file output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bw: type: file diff --git a/modules/unicycler/meta.yml b/modules/unicycler/meta.yml index f6581919..e3b1aab9 100644 --- a/modules/unicycler/meta.yml +++ b/modules/unicycler/meta.yml @@ -30,9 +30,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - scaffolds: type: file @@ -46,9 +46,9 @@ output: type: file description: unicycler log file pattern: "*.{log}" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@JoseEspinosa" diff --git a/modules/untar/meta.yml b/modules/untar/meta.yml index 0dc38292..2b586c92 100644 --- a/modules/untar/meta.yml +++ b/modules/untar/meta.yml @@ -18,9 +18,9 @@ output: type: file description: pattern: "*.*" - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" authors: - "@joseespinosa" diff --git a/modules/unzip/meta.yml b/modules/unzip/meta.yml index 386ca8bb..57c07f00 100644 --- a/modules/unzip/meta.yml +++ b/modules/unzip/meta.yml @@ -18,14 +18,14 @@ input: pattern: "*.zip" output: - - version: - type: file - description: File or directory of decompressed archive - pattern: "versions.yml" - unzipped_archive: type: directory description: Directory contents of the unzipped archive pattern: '${archive.baseName}/' + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@jfy133" diff --git a/modules/variantbam/meta.yml b/modules/variantbam/meta.yml index 62ddb578..9394e418 100644 --- a/modules/variantbam/meta.yml +++ b/modules/variantbam/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file diff --git a/modules/vcftools/meta.yml b/modules/vcftools/meta.yml index e39a0347..a8f864a9 100644 --- a/modules/vcftools/meta.yml +++ b/modules/vcftools/meta.yml @@ -33,9 +33,9 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - vcf: type: file diff --git a/modules/yara/index/meta.yml b/modules/yara/index/meta.yml index acf70f2b..651a67ee 100644 --- a/modules/yara/index/meta.yml +++ b/modules/yara/index/meta.yml @@ -21,9 +21,9 @@ input: description: Input genome fasta file output: - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - index: type: file diff --git a/modules/yara/mapper/meta.yml b/modules/yara/mapper/meta.yml index 4beb0c78..d49823d2 100644 --- a/modules/yara/mapper/meta.yml +++ b/modules/yara/mapper/meta.yml @@ -34,9 +34,9 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - version: + - versions: type: file - description: File containing software version + description: File containing software versions pattern: "versions.yml" - bam: type: file From 515793c73e7f1d8a742aad0ab830ad9575318784 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Mon, 4 Oct 2021 12:37:21 +0100 Subject: [PATCH 115/314] Fix bug with SortMeRna not working on a single db file (#788) * Fix bug with SortMeRna not working on a single db file * Remove tag for instances when running just to create index --- modules/bbmap/bbsplit/main.nf | 1 - modules/sortmerna/main.nf | 11 ++++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/modules/bbmap/bbsplit/main.nf b/modules/bbmap/bbsplit/main.nf index 7a24312b..b2249b17 100644 --- a/modules/bbmap/bbsplit/main.nf +++ b/modules/bbmap/bbsplit/main.nf @@ -5,7 +5,6 @@ params.options = [:] options = initOptions(params.options) process BBMAP_BBSPLIT { - tag "$meta.id" label 'process_high' publishDir "${params.outdir}", mode: params.publish_dir_mode, diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index f35b1468..96fd06aa 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -20,7 +20,7 @@ process SORTMERNA { input: tuple val(meta), path(reads) - path fasta + path fastas output: tuple val(meta), path("*.fastq.gz"), emit: reads @@ -28,14 +28,11 @@ process SORTMERNA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - - def Refs = "" - for (i=0; i Date: Tue, 5 Oct 2021 10:36:14 +0100 Subject: [PATCH 116/314] Update SortMeRNA to 4.3.4 (#790) --- modules/sortmerna/main.nf | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index 96fd06aa..c571a654 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -11,11 +11,11 @@ process SORTMERNA { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::sortmerna=4.2.0" : null) + conda (params.enable_conda ? 
"bioconda::sortmerna=4.3.4" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--0" + container "https://depot.galaxyproject.org/singularity/sortmerna:4.3.4--h9ee0642_0" } else { - container "quay.io/biocontainers/sortmerna:4.2.0--0" + container "quay.io/biocontainers/sortmerna:sortmerna:4.3.4--h9ee0642_0" } input: @@ -40,12 +40,12 @@ process SORTMERNA { --other non_rRNA_reads \\ $options.args - gzip -f < non_rRNA_reads.fq > ${prefix}.fastq.gz + mv non_rRNA_reads.fq.gz ${prefix}.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(sortmerna --version 2>&1 | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') END_VERSIONS """ } else { @@ -62,13 +62,13 @@ process SORTMERNA { --out2 \\ $options.args - gzip -f < non_rRNA_reads_fwd.fq > ${prefix}_1.fastq.gz - gzip -f < non_rRNA_reads_rev.fq > ${prefix}_2.fastq.gz + mv non_rRNA_reads_fwd.fq.gz ${prefix}_1.fastq.gz + mv non_rRNA_reads_rev.fq.gz ${prefix}_2.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(sortmerna --version 2>&1 | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + ${getSoftwareName(task.process)}: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') END_VERSIONS """ } From 714ec6823ea1cae96910e4976efa6f51d06c9d21 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Tue, 5 Oct 2021 12:05:37 +0200 Subject: [PATCH 117/314] Fix sortmerna docker container pointer (#791) --- modules/sortmerna/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index c571a654..9602bb53 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -15,7 +15,7 @@ process SORTMERNA { if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/sortmerna:4.3.4--h9ee0642_0" } else { - container "quay.io/biocontainers/sortmerna:sortmerna:4.3.4--h9ee0642_0" + container "quay.io/biocontainers/sortmerna:4.3.4--h9ee0642_0" } input: From f20c427339936fc95443802d246f3559472ec0c8 Mon Sep 17 00:00:00 2001 From: Lee Katz Date: Tue, 5 Oct 2021 15:49:46 -0400 Subject: [PATCH 118/314] added classic mlst module (#742) * added classic mlst module * removed nf-core TODO comments * included drpatelh suggestions * adjust version capture identation * update main to pass lint * follow output expected by test.yml * suggested prefix change from rpetit3 * Apply suggestions from code review Co-authored-by: Gregor Sturm Co-authored-by: Robert A. 
Petit III Co-authored-by: Harshil Patel --- modules/mlst/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/mlst/main.nf | 42 ++++++++++++++++++ modules/mlst/meta.yml | 42 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/mlst/main.nf | 13 ++++++ tests/modules/mlst/test.yml | 7 +++ 6 files changed, 186 insertions(+) create mode 100644 modules/mlst/functions.nf create mode 100644 modules/mlst/main.nf create mode 100644 modules/mlst/meta.yml create mode 100644 tests/modules/mlst/main.nf create mode 100644 tests/modules/mlst/test.yml diff --git a/modules/mlst/functions.nf b/modules/mlst/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/mlst/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/mlst/main.nf b/modules/mlst/main.nf new file mode 100644 index 00000000..faac9871 --- /dev/null +++ b/modules/mlst/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MLST { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::mlst=2.19.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mlst:2.19.0--hdfd78af_1" + } else { + container "quay.io/biocontainers/mlst:2.19.0--hdfd78af_1" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + mlst \\ + --threads $task.cpus \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(mlst --version 2>&1) | sed 's/mlst //' ) + END_VERSIONS + """ + +} diff --git a/modules/mlst/meta.yml b/modules/mlst/meta.yml new file mode 100644 index 00000000..e9d2a09f --- /dev/null +++ b/modules/mlst/meta.yml @@ -0,0 +1,42 @@ +name: mlst +description: Run Torsten Seemann's classic MLST on a genome assembly +keywords: + - mlst +tools: + - mlst: + description: Scan contig files against PubMLST typing schemes + homepage: None + documentation: None + tool_dev_url: None + doi: "" + licence: ['GPL v2'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Assembly fasta file + pattern: "*.{fasta,fa,fna}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: MLST calls in tsv format + pattern: "*.{tsv}" + +authors: + - "@lskatz" + - "@tseemann" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 63152fe0..af3645df 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -634,6 +634,10 @@ minimap2/index: - modules/minimap2/index/** - tests/modules/minimap2/index/** +mlst: + - modules/mlst/** + - tests/modules/mlst/** + mosdepth: - modules/mosdepth/** - tests/modules/mosdepth/** diff --git a/tests/modules/mlst/main.nf b/tests/modules/mlst/main.nf new file mode 100644 index 00000000..4b7d44be --- /dev/null +++ b/tests/modules/mlst/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MLST } from '../../../modules/mlst/main.nf' addParams( options: [:] ) + +workflow test_mlst { + + input = [ [ id:'test', single_end:false ], // meta map + file("https://raw.githubusercontent.com/nf-core/test-datasets/bactmap/genome/NCTC13799.fna", checkIfExists: true) ] + + MLST ( input ) +} diff --git a/tests/modules/mlst/test.yml b/tests/modules/mlst/test.yml new file mode 100644 index 00000000..5a7c7a0e --- /dev/null +++ b/tests/modules/mlst/test.yml @@ -0,0 +1,7 @@ +- name: mlst test_mlst + command: nextflow run tests/modules/mlst -entry test_mlst -c tests/config/nextflow.config + tags: + - mlst + files: + - path: output/mlst/test.tsv + md5sum: b52df6178834a156c9402012718eb65e From 3868c3ab4bce121bde7f15b9137a0beef59e5d98 Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Tue, 5 Oct 2021 22:23:01 +0200 Subject: [PATCH 119/314] Add gtdbtk/classifywf module (#765) * initial commit [ci skip] * reuse the modules code from nf-core/mag [ci skip] * add contextual information for the module [ci skip] * add stubs to avoid downloading db [ci skip] * trigger test * iterate on tests [ci skip] * itereate tests [ci skip] * add bins [ci skip] * fix stubs [ci skip] * interation on tests with stubs [ci skip] * use the existing pattern and fasta for input * accomodate the new version file format * use variable for the stub [ci skip] * update the versions file in meta.yml * Accomodate code review regarding publishDir function [ci skip] Co-authored-by: Harshil Patel * remove extra newline * use bioconda channel * update the description for filtered file * Apply suggestions from code review * Update main.nf * Update main.nf * Update modules/gtdbtk/classifywf/meta.yml Co-authored-by: Harshil Patel Co-authored-by: Robert A. 
Petit III Co-authored-by: Harshil Patel --- modules/gtdbtk/classifywf/functions.nf | 78 ++++++++++++++++++++++ modules/gtdbtk/classifywf/main.nf | 83 ++++++++++++++++++++++++ modules/gtdbtk/classifywf/meta.yml | 78 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/gtdbtk/classifywf/main.nf | 32 +++++++++ tests/modules/gtdbtk/classifywf/test.yml | 8 +++ 6 files changed, 283 insertions(+) create mode 100644 modules/gtdbtk/classifywf/functions.nf create mode 100644 modules/gtdbtk/classifywf/main.nf create mode 100644 modules/gtdbtk/classifywf/meta.yml create mode 100644 tests/modules/gtdbtk/classifywf/main.nf create mode 100644 tests/modules/gtdbtk/classifywf/test.yml diff --git a/modules/gtdbtk/classifywf/functions.nf b/modules/gtdbtk/classifywf/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gtdbtk/classifywf/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gtdbtk/classifywf/main.nf b/modules/gtdbtk/classifywf/main.nf new file mode 100644 index 00000000..fdcef76a --- /dev/null +++ b/modules/gtdbtk/classifywf/main.nf @@ -0,0 +1,83 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '1.5.0' // When using stubs for the GTDB database, the version info isn't printed. + +process GTDBTK_CLASSIFYWF { + tag "${meta.assembler}-${meta.id}" + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gtdbtk=1.5.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gtdbtk:1.5.0--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/gtdbtk:1.5.0--pyhdfd78af_0" + } + + input: + tuple val(meta), path("bins/*") + tuple val(db_name), path("database/*") + + output: + path "gtdbtk.${meta.assembler}-${meta.id}.*.summary.tsv" , emit: summary + path "gtdbtk.${meta.assembler}-${meta.id}.*.classify.tree.gz" , emit: tree + path "gtdbtk.${meta.assembler}-${meta.id}.*.markers_summary.tsv", emit: markers + path "gtdbtk.${meta.assembler}-${meta.id}.*.msa.fasta.gz" , emit: msa + path "gtdbtk.${meta.assembler}-${meta.id}.*.user_msa.fasta" , emit: user_msa + path "gtdbtk.${meta.assembler}-${meta.id}.*.filtered.tsv" , emit: filtered + path "gtdbtk.${meta.assembler}-${meta.id}.log" , emit: log + path "gtdbtk.${meta.assembler}-${meta.id}.warnings.log" , emit: warnings + path "gtdbtk.${meta.assembler}-${meta.id}.failed_genomes.tsv" , emit: failed + path "versions.yml" , emit: versions + + script: + def pplacer_scratch = params.gtdbtk_pplacer_scratch ? 
"--scratch_dir pplacer_tmp" : "" + """ + export GTDBTK_DATA_PATH="\${PWD}/database" + if [ ${pplacer_scratch} != "" ] ; then + mkdir pplacer_tmp + fi + + gtdbtk classify_wf \\ + $options.args \\ + --genome_dir bins \\ + --prefix "gtdbtk.${meta.assembler}-${meta.id}" \\ + --out_dir "\${PWD}" \\ + --cpus $task.cpus \\ + --pplacer_cpus $params.gtdbtk_pplacer_cpus \\ + $pplacer_scratch \\ + --min_perc_aa $params.gtdbtk_min_perc_aa \\ + --min_af $params.gtdbtk_min_af + + gzip "gtdbtk.${meta.assembler}-${meta.id}".*.classify.tree "gtdbtk.${meta.assembler}-${meta.id}".*.msa.fasta + mv gtdbtk.log "gtdbtk.${meta.assembler}-${meta.id}.log" + mv gtdbtk.warnings.log "gtdbtk.${meta.assembler}-${meta.id}.warnings.log" + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gtdbtk --version -v 2>&1) | sed "s/gtdbtk: version //; s/ Copyright.*//") + END_VERSIONS + """ + + stub: + """ + touch gtdbtk.${meta.assembler}-${meta.id}.stub.summary.tsv + touch gtdbtk.${meta.assembler}-${meta.id}.stub.classify.tree.gz + touch gtdbtk.${meta.assembler}-${meta.id}.stub.markers_summary.tsv + touch gtdbtk.${meta.assembler}-${meta.id}.stub.msa.fasta.gz + touch gtdbtk.${meta.assembler}-${meta.id}.stub.user_msa.fasta + touch gtdbtk.${meta.assembler}-${meta.id}.stub.filtered.tsv + touch gtdbtk.${meta.assembler}-${meta.id}.log + touch gtdbtk.${meta.assembler}-${meta.id}.warnings.log + touch gtdbtk.${meta.assembler}-${meta.id}.failed_genomes.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo "$VERSION") + END_VERSIONS + """ +} diff --git a/modules/gtdbtk/classifywf/meta.yml b/modules/gtdbtk/classifywf/meta.yml new file mode 100644 index 00000000..d70de362 --- /dev/null +++ b/modules/gtdbtk/classifywf/meta.yml @@ -0,0 +1,78 @@ +name: gtdbtk_classifywf +description: GTDB-Tk is a software toolkit for assigning objective taxonomic classifications to bacterial and archaeal genomes based on the Genome Database Taxonomy GTDB. +keywords: + - GTDB taxonomy + - taxonomic classification +tools: + - gtdbtk: + description: GTDB-Tk is a software toolkit for assigning objective taxonomic classifications to bacterial and archaeal genomes based on the Genome Database Taxonomy GTDB. + homepage: https://ecogenomics.github.io/GTDBTk/ + documentation: https://ecogenomics.github.io/GTDBTk/ + tool_dev_url: https://github.com/Ecogenomics/GTDBTk + doi: "10.1093/bioinformatics/btz848" + licence: ['GNU General Public v3 (GPL v3)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false, assembler:'spades' ] + - bins: + type: The binned fasta files from the assembler + description: Fasta files + pattern: "*.{fasta,fa}" + - database: + type: The local copy of the taxonomic database used by GTDB-tk + description: The unzipped copy of the database + pattern: "*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - summary: + type: file + description: A TSV summary file for the classification + pattern: "*.{summary.tsv}" + - tree: + type: file + description: NJ or UPGMA tree in Newick format produced from a multiple sequence alignment + pattern: "*.{classify.tree.gz}" + - markers: + type: file + description: A TSV summary file lineage markers used for the classification. 
+ pattern: "*.{markers_summary.tsv}" + - msa: + type: file + description: Multiple sequence alignments file. + pattern: "*.{msa.fasta.gz}" + - user_msa: + type: file + description: Multiple sequence alignments file for the user-provided files. + pattern: "*.{user_msa.fasta.gz}" + - filtered: + type: file + description: A list of genomes with an insufficient number of amino acids in MSA.. + pattern: "*.{filtered.tsv}" + - log: + type: file + description: GTDB-tk log file + pattern: "*.{log}" + - warnings: + type: file + description: GTDB-tk warnings log file + pattern: "*.{warnings.log}" + - failed: + type: file + description: A TSV summary of the genomes which GTDB-tk failed to classify. + pattern: "*.{failed_genomes.tsv}" +authors: + - "@skrakau" + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index af3645df..34a3889b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -458,6 +458,10 @@ graphmap2/index: - modules/graphmap2/index/** - tests/modules/graphmap2/index/** +gtdbtk/classifywf: + - modules/gtdbtk/classifywf/** + - tests/modules/gtdbtk/classifywf/** + gubbins: - modules/gubbins/** - tests/modules/gubbins/** diff --git a/tests/modules/gtdbtk/classifywf/main.nf b/tests/modules/gtdbtk/classifywf/main.nf new file mode 100644 index 00000000..f52b0ccc --- /dev/null +++ b/tests/modules/gtdbtk/classifywf/main.nf @@ -0,0 +1,32 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GTDBTK_CLASSIFYWF } from '../../../../modules/gtdbtk/classifywf/main.nf' addParams( options: [:] ) + +process STUB_GTDBTK_DATABASE { + output: + tuple val("gtdbtk_r202_data"), path("database/*"), emit: database + + stub: + """ + mkdir database + touch database/gtdbtk_r202_data + """ +} + +workflow test_gtdbtk_classifywf { + + STUB_GTDBTK_DATABASE() + + input = [ + [ id:'test', single_end:false, assembler:'SPADES' ], + [ + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['scaffolds_fasta'], checkIfExists: true) + ] + ] + + GTDBTK_CLASSIFYWF ( input, STUB_GTDBTK_DATABASE.out.database ) +} diff --git a/tests/modules/gtdbtk/classifywf/test.yml b/tests/modules/gtdbtk/classifywf/test.yml new file mode 100644 index 00000000..6d0f055e --- /dev/null +++ b/tests/modules/gtdbtk/classifywf/test.yml @@ -0,0 +1,8 @@ +- name: gtdbtk classifywf + command: nextflow run ./tests/modules/gtdbtk/classifywf -entry test_gtdbtk_classifywf -c tests/config/nextflow.config -stub-run + tags: + - gtdbtk + - gtdbtk/classifywf + files: + - path: output/gtdbtk/gtdbtk.SPADES-test.stub.summary.tsv + md5sum: d41d8cd98f00b204e9800998ecf8427e From 7d98bf1d7dc67c8e578aab81c8ec5f7fb209cb56 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 5 Oct 2021 14:27:30 -0600 Subject: [PATCH 120/314] add mashtree module (#767) * add mashtree module * remove todo * whitespace adjustment * remove un-reproducible md5sum * Update main.nf * Update main.nf * Update meta.yml * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/mashtree/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/mashtree/main.nf | 44 +++++++++++++++++++ modules/mashtree/meta.yml | 48 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/mashtree/main.nf | 16 +++++++ tests/modules/mashtree/test.yml | 8 ++++ 6 files changed, 198 insertions(+) create mode 100644 modules/mashtree/functions.nf create mode 100644 modules/mashtree/main.nf create mode 100644 modules/mashtree/meta.yml create mode 100644 tests/modules/mashtree/main.nf create mode 100644 tests/modules/mashtree/test.yml diff --git a/modules/mashtree/functions.nf b/modules/mashtree/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/mashtree/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/mashtree/main.nf b/modules/mashtree/main.nf new file mode 100644 index 00000000..db0b14f5 --- /dev/null +++ b/modules/mashtree/main.nf @@ -0,0 +1,44 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MASHTREE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::mashtree=1.2.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mashtree:1.2.0--pl526h516909a_0" + } else { + container "quay.io/biocontainers/mashtree:1.2.0--pl526h516909a_0" + } + + input: + tuple val(meta), path(seqs) + + output: + tuple val(meta), path("*.dnd"), emit: tree + tuple val(meta), path("*.tsv"), emit: matrix + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + mashtree \\ + $options.args \\ + --numcpus $task.cpus \\ + --outmatrix ${prefix}.tsv \\ + --outtree ${prefix}.dnd \\ + $seqs + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$( mashtree --version 2>&1 ) | sed 's/^.*Mashtree //' ) + END_VERSIONS + """ +} diff --git a/modules/mashtree/meta.yml b/modules/mashtree/meta.yml new file mode 100644 index 00000000..3cf74772 --- /dev/null +++ b/modules/mashtree/meta.yml @@ -0,0 +1,48 @@ +name: mashtree +description: Quickly create a tree using Mash distances +keywords: + - tree + - mash + - fasta + - fastq +tools: + - mashtree: + description: Create a tree using Mash distances + homepage: https://github.com/lskatz/mashtree + documentation: https://github.com/lskatz/mashtree + tool_dev_url: https://github.com/lskatz/mashtree + doi: "https://doi.org/10.21105/joss.01762" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - seqs: + type: file + description: FASTA, FASTQ, GenBank, or Mash sketch files + pattern: "*.{fna,fna.gz,fasta,fasta.gz,fa,fa.gz,gbk,gbk.gz,fastq.gz,msh}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tree: + type: file + description: A Newick formatted tree file + pattern: "*.{dnd}" + - matrix: + type: file + description: A TSV matrix of pair-wise Mash distances + pattern: "*.{tsv}" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 34a3889b..7b010097 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -614,6 +614,10 @@ mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** +mashtree: + - modules/mashtree/** + - tests/modules/mashtree/** + metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** diff --git a/tests/modules/mashtree/main.nf b/tests/modules/mashtree/main.nf new file mode 100644 index 00000000..47a7c12a --- /dev/null +++ b/tests/modules/mashtree/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MASHTREE } from '../../../modules/mashtree/main.nf' addParams( options: [:] ) + +workflow test_mashtree { + + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + MASHTREE ( input ) +} diff --git a/tests/modules/mashtree/test.yml b/tests/modules/mashtree/test.yml new file mode 100644 index 00000000..83ff6272 --- /dev/null +++ b/tests/modules/mashtree/test.yml @@ -0,0 +1,8 @@ +- name: mashtree test_mashtree + command: nextflow run tests/modules/mashtree -entry test_mashtree -c tests/config/nextflow.config + tags: + - mashtree + files: + - path: output/mashtree/test.dnd + md5sum: 007b3949a9f0c991624791d2fb076824 + - path: output/mashtree/test.tsv From bb7beff49740a7b92e851208b7e25875a909fe5c Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 5 Oct 2021 14:42:09 -0600 Subject: [PATCH 121/314] add hicap module (#772) * add hicap module * add info on optional inputs * fix typo * Update meta.yml * Update main.nf * Update meta.yml * Update modules/hicap/main.nf Co-authored-by: Harshil Patel * Update modules/hicap/main.nf Co-authored-by: Harshil Patel Co-authored-by: Harshil Patel --- modules/hicap/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/hicap/main.nf | 56 +++++++++++++++++++++++ modules/hicap/meta.yml | 59 +++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/hicap/main.nf | 16 +++++++ tests/modules/hicap/test.yml | 10 +++++ 6 files changed, 223 insertions(+) create mode 100644 modules/hicap/functions.nf create mode 100644 modules/hicap/main.nf create mode 100644 modules/hicap/meta.yml create mode 100644 tests/modules/hicap/main.nf create mode 100644 tests/modules/hicap/test.yml diff --git a/modules/hicap/functions.nf b/modules/hicap/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/hicap/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/hicap/main.nf b/modules/hicap/main.nf new file mode 100644 index 00000000..e2e70678 --- /dev/null +++ b/modules/hicap/main.nf @@ -0,0 +1,56 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process HICAP { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::hicap=1.0.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/hicap:1.0.3--py_0" + } else { + container "quay.io/biocontainers/hicap:1.0.3--py_0" + } + + input: + tuple val(meta), path(fasta) + path database_dir + path model_fp + + output: + tuple val(meta), path("*.gbk"), emit: gbk + tuple val(meta), path("*.svg"), emit: svg + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def database_args = database_dir ? "--database_dir ${database_dir}" : "" + def model_args = model_fp ? "--model_fp ${model_fp}" : "" + def is_compressed = fasta.getName().endsWith(".gz") ? true : false + def fasta_name = fasta.getName().replace(".gz", "") + """ + if [ "$is_compressed" == "true" ]; then + gzip -c -d $fasta > $fasta_name + fi + + hicap \\ + --query_fp $fasta_name \\ + $database_args \\ + $model_args \\ + $options.args \\ + --threads $task.cpus \\ + -o ./ + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$( hicap --version 2>&1 ) | sed 's/^.*hicap //' ) + END_VERSIONS + """ +} diff --git a/modules/hicap/meta.yml b/modules/hicap/meta.yml new file mode 100644 index 00000000..275df665 --- /dev/null +++ b/modules/hicap/meta.yml @@ -0,0 +1,59 @@ +name: hicap +description: Identify cap locus serotype and structure in your Haemophilus influenzae assemblies +keywords: + - fasta + - serotype + - Haemophilus influenzae +tools: + - hicap: + description: In silico typing of the H. influenzae capsule locus + homepage: https://github.com/scwatts/hicap + documentation: https://github.com/scwatts/hicap + tool_dev_url: https://github.com/scwatts/hicap + doi: "https://doi.org/10.1128/JCM.00190-19" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA formatted assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - database_dir: + type: directory + description: Optional - Directory containing locus database + pattern: "*/*" + - model_fp: + type: file + description: Optional - Prodigal model to use for gene prediction + pattern: "*.{bin}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - gbk: + type: file + description: GenBank file and cap locus annotations + pattern: "*.gbk" + - svg: + type: file + description: Visualization of annotated cap locus + pattern: "*.svg" + - tsv: + type: file + description: Detailed summary of cap locus annotations + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7b010097..1cd98564 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -470,6 +470,10 @@ gunzip: - modules/gunzip/** - tests/modules/gunzip/** +hicap: + - modules/hicap/** + - tests/modules/hicap/** + hifiasm: - modules/hifiasm/** - tests/modules/hifiasm/** diff --git a/tests/modules/hicap/main.nf b/tests/modules/hicap/main.nf new file mode 100644 index 00000000..77c309a5 --- /dev/null +++ b/tests/modules/hicap/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HICAP } from '../../../modules/hicap/main.nf' addParams( options: [:] ) + +workflow test_hicap { + + input = [ [ id:'test', single_end:false ], // meta map + file("https://github.com/bactopia/bactopia-tests/raw/main/data/species-specific/haemophilus-influenzae/GCF_900478275.fna.gz", checkIfExists: true) ] + + database_dir = [] + model_fp = [] + + HICAP ( input, database_dir, model_fp ) +} diff --git a/tests/modules/hicap/test.yml b/tests/modules/hicap/test.yml new file mode 100644 index 00000000..8c8420fd --- /dev/null +++ b/tests/modules/hicap/test.yml @@ -0,0 +1,10 @@ +- name: hicap test_hicap + command: nextflow run tests/modules/hicap -entry test_hicap -c tests/config/nextflow.config + tags: + - hicap + files: + - path: output/hicap/GCF_900478275.gbk + md5sum: 562d026956903354ac80721f501335d4 + - path: output/hicap/GCF_900478275.svg + md5sum: 4fb94871dd0fdd8b4496049668176631 + - path: output/hicap/GCF_900478275.tsv From 8179bab8194ad3974f9c25c5f18d5a3695895693 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 5 Oct 2021 14:48:47 -0600 Subject: [PATCH 122/314] add csvtk/concat module (#785) * add csvtk/concat module * Update modules/csvtk/concat/main.nf Co-authored-by: Gregor Sturm * allow alternate delimiters * Update main.nf * Update modules/csvtk/concat/main.nf Co-authored-by: Harshil Patel * Update modules/csvtk/concat/main.nf Co-authored-by: Harshil Patel * Update modules/csvtk/concat/main.nf Co-authored-by: Harshil Patel * Update modules/csvtk/concat/main.nf Co-authored-by: Harshil Patel * Update modules/csvtk/concat/main.nf * Apply suggestions from code review Co-authored-by: Gregor Sturm Co-authored-by: Harshil Patel --- modules/csvtk/concat/functions.nf | 78 +++++++++++++++++++++++++++++ modules/csvtk/concat/main.nf | 49 ++++++++++++++++++ modules/csvtk/concat/meta.yml | 51 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/csvtk/concat/main.nf | 20 ++++++++ tests/modules/csvtk/concat/test.yml | 8 +++ 6 files changed, 210 insertions(+) create mode 100644 modules/csvtk/concat/functions.nf create mode 100644 modules/csvtk/concat/main.nf create mode 100644 modules/csvtk/concat/meta.yml create mode 100644 tests/modules/csvtk/concat/main.nf create mode 100644 tests/modules/csvtk/concat/test.yml diff --git a/modules/csvtk/concat/functions.nf b/modules/csvtk/concat/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/csvtk/concat/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/csvtk/concat/main.nf b/modules/csvtk/concat/main.nf new file mode 100644 index 00000000..afccf722 --- /dev/null +++ b/modules/csvtk/concat/main.nf @@ -0,0 +1,49 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CSVTK_CONCAT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::csvtk=0.23.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0" + } else { + container "quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0" + } + + input: + tuple val(meta), path(csv) + val in_format + val out_format + + output: + tuple val(meta), path("*.${out_format}"), emit: csv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def delimiter = in_format == "tsv" ? "\t" : (in_format == "csv" ? "," : in_format) + def out_delimiter = out_format == "tsv" ? "\t" : (out_format == "csv" ? "," : out_format) + """ + csvtk \\ + concat \\ + $options.args \\ + --num-cpus $task.cpus \\ + --delimiter "${delimiter}" \\ + --out-delimiter "${out_delimiter}" \\ + --out-file ${prefix}.${out_format} \\ + $csv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + csvtk: \$(echo \$( csvtk version | sed -e "s/csvtk v//g" )) + END_VERSIONS + """ +} diff --git a/modules/csvtk/concat/meta.yml b/modules/csvtk/concat/meta.yml new file mode 100644 index 00000000..6c7f9f10 --- /dev/null +++ b/modules/csvtk/concat/meta.yml @@ -0,0 +1,51 @@ +name: csvtk_concat +description: Concatenate two or more CSV (or TSV) tables into a single table +keywords: + - concatenate + - tsv + - csv +tools: + - csvtk: + description: A cross-platform, efficient, practical CSV/TSV toolkit + homepage: http://bioinf.shenwei.me/csvtk + documentation: http://bioinf.shenwei.me/csvtk + tool_dev_url: https://github.com/shenwei356/csvtk + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - csv: + type: file + description: CSV/TSV formatted files + pattern: "*.{csv,tsv}" + - in_format: + type: string + description: Input format (csv, tab, or a delimiting character) + pattern: "*" + - out_format: + type: string + description: Output format (csv, tab, or a delimiting character) + pattern: "*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "version.yml" + - csv: + type: file + description: Concatenated CSV/TSV file + pattern: "*.{csv,tsv}" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 1cd98564..914ed8f9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -266,6 +266,10 @@ cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** +csvtk/concat: + - modules/csvtk/concat/** + - tests/modules/csvtk/concat/** + custom/dumpsoftwareversions: - modules/custom/dumpsoftwareversions/** - tests/modules/custom/dumpsoftwareversions/** diff --git a/tests/modules/csvtk/concat/main.nf b/tests/modules/csvtk/concat/main.nf new file mode 100644 index 00000000..22b0205f --- /dev/null +++ b/tests/modules/csvtk/concat/main.nf @@ -0,0 +1,20 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CSVTK_CONCAT } from '../../../../modules/csvtk/concat/main.nf' addParams( options: [:] ) + +workflow test_csvtk_concat { + + input = [ + [ id:'test' ], // meta map + [ file("https://github.com/nf-core/test-datasets/raw/bacass/bacass_hybrid.csv", checkIfExists: true), + file("https://github.com/nf-core/test-datasets/raw/bacass/bacass_long.csv", checkIfExists: true), + file("https://github.com/nf-core/test-datasets/raw/bacass/bacass_short.csv", checkIfExists: true) ] + ] + + in_format = "tsv" + out_format = "csv" + + CSVTK_CONCAT ( input, in_format, out_format ) +} diff --git a/tests/modules/csvtk/concat/test.yml b/tests/modules/csvtk/concat/test.yml new file mode 100644 index 00000000..0fe9c604 --- /dev/null +++ b/tests/modules/csvtk/concat/test.yml @@ -0,0 +1,8 @@ +- name: csvtk concat + command: nextflow run ./tests/modules/csvtk/concat -entry test_csvtk_concat -c tests/config/nextflow.config + tags: + - csvtk + - csvtk/concat + files: + - path: output/csvtk/test.csv + md5sum: 917fe5d857f04b58e0f49c384d167cec From 8d04c34934c371bd17eda052bfea403b95e26c7f Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 5 Oct 2021 14:55:41 -0600 Subject: [PATCH 123/314] add ismapper module (#773) * add ismapper module * Update main.nf * Update main.nf * Update meta.yml * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/ismapper/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/ismapper/main.nf | 44 +++++++++++++++++++ modules/ismapper/meta.yml | 50 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ismapper/main.nf | 18 ++++++++ tests/modules/ismapper/test.yml | 27 ++++++++++++ 6 files changed, 221 insertions(+) create mode 100644 modules/ismapper/functions.nf create mode 100644 modules/ismapper/main.nf create mode 100644 modules/ismapper/meta.yml create mode 100644 tests/modules/ismapper/main.nf create mode 100644 tests/modules/ismapper/test.yml diff --git a/modules/ismapper/functions.nf b/modules/ismapper/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ismapper/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ismapper/main.nf b/modules/ismapper/main.nf new file mode 100644 index 00000000..20d3d5b7 --- /dev/null +++ b/modules/ismapper/main.nf @@ -0,0 +1,44 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ISMAPPER { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::ismapper=2.0.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ismapper:2.0.2--pyhdfd78af_1" + } else { + container "quay.io/biocontainers/ismapper:2.0.2--pyhdfd78af_1" + } + + input: + tuple val(meta), path(reads), path(reference), path(query) + + output: + tuple val(meta), path("results/*"), emit: results + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + ismap \\ + $options.args \\ + --t $task.cpus \\ + --output_dir results \\ + --queries $query \\ + --reference $reference \\ + --reads $reads + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$( ismap --version 2>&1 ) | sed 's/^.*ismap //' ) + END_VERSIONS + """ +} diff --git a/modules/ismapper/meta.yml b/modules/ismapper/meta.yml new file mode 100644 index 00000000..4ca2450a --- /dev/null +++ b/modules/ismapper/meta.yml @@ -0,0 +1,50 @@ +name: ismapper +description: Identify insertion sites positions in bacterial genomes +keywords: + - fastq + - insertion sequences +tools: + - ismapper: + description: A mapping-based tool for identification of the site and orientation of IS insertions in bacterial genomes. + homepage: https://github.com/jhawkey/IS_mapper + documentation: https://github.com/jhawkey/IS_mapper + tool_dev_url: https://github.com/jhawkey/IS_mapper + doi: "https://doi.org/10.1186/s12864-015-1860-2" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: A set of paired-end FASTQ files + pattern: "*.{fastq.gz,fq.gz}" + - reference: + type: file + description: Reference genome in GenBank format + pattern: "*.{gbk}" + - query: + type: file + description: Insertion sequences to query in FASTA format + pattern: "*.{fasta,fa}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - results: + type: directory + description: Directory containing ISMapper result files + pattern: "*/*" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 914ed8f9..8de7f7e2 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -521,6 +521,10 @@ iqtree: - modules/iqtree/** - tests/modules/iqtree/** +ismapper: + - modules/ismapper/** + - tests/modules/ismapper/** + ivar/consensus: - modules/ivar/consensus/** - tests/modules/ivar/consensus/** diff --git a/tests/modules/ismapper/main.nf b/tests/modules/ismapper/main.nf new file mode 100644 index 00000000..b28344dc --- /dev/null +++ b/tests/modules/ismapper/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ISMAPPER } from '../../../modules/ismapper/main.nf' addParams( options: [:] ) + +workflow test_ismapper { + + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + file("https://github.com/jhawkey/IS_mapper/raw/master/test/inputs/S_suis_P17.gbk", checkIfExists: true), + file("https://github.com/jhawkey/IS_mapper/raw/master/test/inputs/ISSsu3.fasta", checkIfExists: true) + ] + + ISMAPPER ( input ) +} diff --git a/tests/modules/ismapper/test.yml b/tests/modules/ismapper/test.yml new file mode 100644 index 00000000..0574b855 --- /dev/null +++ b/tests/modules/ismapper/test.yml @@ -0,0 +1,27 @@ +- name: ismapper test_ismapper + command: nextflow run tests/modules/ismapper -entry test_ismapper -c tests/config/nextflow.config + tags: + - ismapper + files: + - path: output/ismapper/results/test/ISSsu3/test_ISSsu3_left_final.fastq + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test_ISSsu3_right_final.fastq + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test__AM946016.1_closest.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test__AM946016.1_intersect.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test__AM946016.1_table.txt + md5sum: 9e05cda3990cb841db2bfb6e6e04a1f5 + - path: output/ismapper/results/test/ISSsu3/test_left_AM946016.1_finalcov.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test_left_AM946016.1_merged.sorted.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test_left_AM946016.1_unpaired.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test_right_AM946016.1_finalcov.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test_right_AM946016.1_merged.sorted.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ismapper/results/test/ISSsu3/test_right_AM946016.1_unpaired.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e From 0ba88fb8699ab1d94ca748ca1c5a903cbbc30602 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 5 Oct 2021 15:08:47 -0600 Subject: [PATCH 124/314] add roary module (#776) * add module roary * Update meta.yml * Update meta.yml * Update meta.yml * Update meta.yml * Update main.nf * Update meta.yml * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/roary/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/roary/main.nf | 43 ++++++++++++++++++ modules/roary/meta.yml | 47 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/roary/main.nf | 16 +++++++ tests/modules/roary/test.yml | 39 +++++++++++++++++ 6 files changed, 227 insertions(+) create mode 100644 modules/roary/functions.nf create mode 100644 modules/roary/main.nf create mode 100644 modules/roary/meta.yml create mode 100644 tests/modules/roary/main.nf create mode 100644 tests/modules/roary/test.yml diff --git a/modules/roary/functions.nf b/modules/roary/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/roary/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/roary/main.nf b/modules/roary/main.nf new file mode 100644 index 00000000..9dc948fb --- /dev/null +++ b/modules/roary/main.nf @@ -0,0 +1,43 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ROARY { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::roary=3.13.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/roary:3.13.0--pl526h516909a_0" + } else { + container "quay.io/biocontainers/roary:3.13.0--pl526h516909a_0" + } + + input: + tuple val(meta), path(gff) + + output: + tuple val(meta), path("results/*") , emit: results + tuple val(meta), path("results/*.aln"), optional: true, emit: aln + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + roary \\ + $options.args \\ + -p $task.cpus \\ + -f results/ \\ + $gff + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( roary --version ) + END_VERSIONS + """ +} diff --git a/modules/roary/meta.yml b/modules/roary/meta.yml new file mode 100644 index 00000000..4cf42bdf --- /dev/null +++ b/modules/roary/meta.yml @@ -0,0 +1,47 @@ +name: roary +description: Calculate pan-genome from annotated bacterial assemblies in GFF3 format +keywords: + - gff + - pan-genome + - alignment +tools: + - roary: + description: Rapid large-scale prokaryote pan genome analysis + homepage: http://sanger-pathogens.github.io/Roary/ + documentation: http://sanger-pathogens.github.io/Roary/ + tool_dev_url: https://github.com/sanger-pathogens/Roary/ + doi: "http://dx.doi.org/10.1093/bioinformatics/btv421" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - gff: + type: file + description: A set of GFF3 formatted files + pattern: "*.{gff}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - results: + type: directory + description: Directory containing Roary result files + pattern: "*/*" + - aln: + type: file + description: Core-genome alignment produced by Roary (Optional) + pattern: "*.{aln}" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8de7f7e2..19c17f40 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -811,6 +811,10 @@ raxmlng: - modules/raxmlng/** - tests/modules/raxmlng/** +roary: + - modules/roary/** + - tests/modules/roary/** + rsem/calculateexpression: - modules/rsem/calculateexpression/** - tests/modules/rsem/calculateexpression/** diff --git a/tests/modules/roary/main.nf b/tests/modules/roary/main.nf new file mode 100644 index 00000000..a4a96d6e --- /dev/null +++ b/tests/modules/roary/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ROARY } from '../../../modules/roary/main.nf' addParams( options: [:] ) + +workflow test_roary { + + input = [ [ id:'test', single_end:false ], // meta map + [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), + file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), + file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + ] + + ROARY ( input ) +} diff --git a/tests/modules/roary/test.yml b/tests/modules/roary/test.yml new file mode 100644 index 00000000..c8e8c33d --- /dev/null +++ b/tests/modules/roary/test.yml @@ -0,0 +1,39 @@ +- name: roary test_roary + command: nextflow run tests/modules/roary -entry test_roary -c tests/config/nextflow.config + tags: + - roary + files: + - path: output/roary/results/accessory.header.embl + contains: ['ID Genome standard; DNA; PRO; 1234 BP.'] + - path: output/roary/results/accessory.tab + contains: ['FT'] + - path: output/roary/results/accessory_binary_genes.fa + md5sum: 0baeea4947bf17a2bf29d43a44f0278f + - path: output/roary/results/accessory_binary_genes.fa.newick + md5sum: b1f8c76ab231bd38b850c1f8d3c1584b + - path: output/roary/results/accessory_graph.dot + contains: ['/* list of nodes */'] + - path: output/roary/results/blast_identity_frequency.Rtab + md5sum: 829baa25c3fad94b1af207265452a692 + - path: output/roary/results/clustered_proteins + contains: ['JKHLNHAL_00087'] + - path: output/roary/results/core_accessory.header.embl + contains: ['ID Genome standard; DNA; PRO; 1234 BP.'] + - path: output/roary/results/core_accessory.tab + contains: ['FT /taxa="GCF_000292685 GCF_000298385 GCF_002849995"'] + - path: output/roary/results/core_accessory_graph.dot + contains: ['/* list of nodes */'] + - path: output/roary/results/gene_presence_absence.Rtab + contains: ['Gene'] + - path: output/roary/results/gene_presence_absence.csv + contains: ['"Gene","Non-unique Gene name","Annotation","No. isolates","No. 
sequences"'] + - path: output/roary/results/number_of_conserved_genes.Rtab + contains: ['279'] + - path: output/roary/results/number_of_genes_in_pan_genome.Rtab + contains: ['279'] + - path: output/roary/results/number_of_new_genes.Rtab + contains: ['279'] + - path: output/roary/results/number_of_unique_genes.Rtab + contains: ['279'] + - path: output/roary/results/summary_statistics.txt + md5sum: 3921b5445df6a7ed59408119b8860a58 From ad0f4d2e2431183881983460f616eb3af2f02606 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Tue, 5 Oct 2021 15:15:08 -0600 Subject: [PATCH 125/314] patch output extension in csvtk/concat (#797) * patch output extension * Update main.nf * Update main.nf * Update main.nf * whitespace * Update main.nf * Update main.nf * Update modules/csvtk/concat/main.nf Co-authored-by: Harshil Patel --- modules/csvtk/concat/main.nf | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/modules/csvtk/concat/main.nf b/modules/csvtk/concat/main.nf index afccf722..194b1e14 100644 --- a/modules/csvtk/concat/main.nf +++ b/modules/csvtk/concat/main.nf @@ -24,13 +24,14 @@ process CSVTK_CONCAT { val out_format output: - tuple val(meta), path("*.${out_format}"), emit: csv - path "versions.yml" , emit: versions + tuple val(meta), path("${prefix}.${out_extension}"), emit: csv + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def delimiter = in_format == "tsv" ? "\t" : (in_format == "csv" ? "," : in_format) def out_delimiter = out_format == "tsv" ? "\t" : (out_format == "csv" ? "," : out_format) + out_extension = out_format == "tsv" ? 'tsv' : 'csv' """ csvtk \\ concat \\ @@ -38,7 +39,7 @@ process CSVTK_CONCAT { --num-cpus $task.cpus \\ --delimiter "${delimiter}" \\ --out-delimiter "${out_delimiter}" \\ - --out-file ${prefix}.${out_format} \\ + --out-file ${prefix}.${out_extension} \\ $csv cat <<-END_VERSIONS > versions.yml From e77b3d72f3357462376695585e88a598809cf0ed Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 5 Oct 2021 15:21:35 -0600 Subject: [PATCH 126/314] add spatyper module (#784) * add spatyper module * lint fix * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/spatyper/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/spatyper/main.nf | 46 +++++++++++++++++++ modules/spatyper/meta.yml | 50 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/spatyper/main.nf | 26 +++++++++++ tests/modules/spatyper/test.yml | 15 +++++++ 6 files changed, 219 insertions(+) create mode 100644 modules/spatyper/functions.nf create mode 100644 modules/spatyper/main.nf create mode 100644 modules/spatyper/meta.yml create mode 100644 tests/modules/spatyper/main.nf create mode 100644 tests/modules/spatyper/test.yml diff --git a/modules/spatyper/functions.nf b/modules/spatyper/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/spatyper/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/spatyper/main.nf b/modules/spatyper/main.nf new file mode 100644 index 00000000..ce320bfc --- /dev/null +++ b/modules/spatyper/main.nf @@ -0,0 +1,46 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SPATYPER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::spatyper=0.3.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/spatyper%3A0.3.3--pyhdfd78af_3" + } else { + container "quay.io/biocontainers/spatyper:0.3.3--pyhdfd78af_3" + } + + input: + tuple val(meta), path(fasta) + path repeats + path repeat_order + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def input_args = repeats && repeat_order ? "-r ${repeats} -o ${repeat_order}" : "" + """ + env + spaTyper \\ + $options.args \\ + $input_args \\ + --fasta $fasta \\ + --output ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(spaTyper --version 2>&1) | sed 's/^.*spaTyper //' ) + END_VERSIONS + """ +} diff --git a/modules/spatyper/meta.yml b/modules/spatyper/meta.yml new file mode 100644 index 00000000..94f17a69 --- /dev/null +++ b/modules/spatyper/meta.yml @@ -0,0 +1,50 @@ +name: spatyper +description: Computational method for finding spa types. +keywords: + - fasta + - spatype +tools: + - spatyper: + description: Computational method for finding spa types. + homepage: https://github.com/HCGB-IGTP/spaTyper + documentation: https://github.com/HCGB-IGTP/spaTyper + tool_dev_url: https://github.com/HCGB-IGTP/spaTyper + doi: https://doi.org/10.5281/zenodo.4063625 + licence: ['LGPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - repeats: + type: file + description: spa repeat sequences in FASTA format (Optional) + pattern: "*.{fasta}" + - repeat_order: + type: file + description: spa types and order of repeats (Optional) + pattern: "*.{txt}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited results + pattern: "*.{tsv}" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 19c17f40..39a21981 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -951,6 +951,10 @@ spades: - modules/spades/** - tests/modules/spades/** +spatyper: + - modules/spatyper/** + - tests/modules/spatyper/** + staphopiasccmec: - modules/staphopiasccmec/** - tests/modules/staphopiasccmec/** diff --git a/tests/modules/spatyper/main.nf b/tests/modules/spatyper/main.nf new file mode 100644 index 00000000..65729cc0 --- /dev/null +++ b/tests/modules/spatyper/main.nf @@ -0,0 +1,26 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SPATYPER } from '../../../modules/spatyper/main.nf' addParams( options: [:] ) +include { SPATYPER as SPATYPER_ENRICH } from '../../../modules/spatyper/main.nf' addParams( options: [args: '--do_enrich'] ) + +workflow test_spatyper { + input = [ [ id:'test' ], + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + repeats = [] + repeat_order = [] + + SPATYPER ( input, repeats, repeat_order ) +} + +workflow test_spatyper_enrich { + input = [ [ id:'test' ], + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + repeats = [] + repeat_order = [] + + SPATYPER_ENRICH ( input, repeats, repeat_order ) +} diff --git a/tests/modules/spatyper/test.yml b/tests/modules/spatyper/test.yml new file mode 100644 index 00000000..49516812 --- /dev/null +++ b/tests/modules/spatyper/test.yml @@ -0,0 +1,15 @@ +- name: spatyper test_spatyper + command: nextflow run tests/modules/spatyper -entry test_spatyper -c tests/config/nextflow.config + tags: + - spatyper + files: + - path: output/spatyper/test.tsv + md5sum: a698352823875171696e5e7ed7015c13 + +- name: spatyper test_spatyper_enrich + command: nextflow run tests/modules/spatyper -entry test_spatyper_enrich -c tests/config/nextflow.config + tags: + - spatyper + files: + - path: output/spatyper/test.tsv + md5sum: a698352823875171696e5e7ed7015c13 From 053797510df35ec45a97f3cdafeb53d6fe39b225 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 5 Oct 2021 15:28:25 -0600 Subject: [PATCH 127/314] add pirate module (#777) * new module pirate * remove md5 check for non reproducible binary files * get those to-dos out * Update main.nf * Update meta.yml * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/pirate/functions.nf | 78 ++++++++++++++++++++++++++ modules/pirate/main.nf | 43 +++++++++++++++ modules/pirate/meta.yml | 47 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/pirate/main.nf | 16 ++++++ tests/modules/pirate/test.yml | 98 +++++++++++++++++++++++++++++++++ 6 files changed, 286 insertions(+) create mode 100644 modules/pirate/functions.nf create mode 100644 modules/pirate/main.nf create mode 100644 modules/pirate/meta.yml create mode 100644 tests/modules/pirate/main.nf create mode 100644 tests/modules/pirate/test.yml diff --git a/modules/pirate/functions.nf b/modules/pirate/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/pirate/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/pirate/main.nf b/modules/pirate/main.nf new file mode 100644 index 00000000..01a950dd --- /dev/null +++ b/modules/pirate/main.nf @@ -0,0 +1,43 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PIRATE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::pirate=1.0.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/pirate%3A1.0.4--hdfd78af_1" + } else { + container "quay.io/biocontainers/pirate:1.0.4--hdfd78af_1" + } + + input: + tuple val(meta), path(gff) + + output: + tuple val(meta), path("results/*") , emit: results + tuple val(meta), path("results/core_alignment.fasta"), optional: true, emit: aln + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + PIRATE \\ + $options.args \\ + --threads $task.cpus \\ + --input ./ \\ + --output results/ + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$( PIRATE --version 2>&1) | sed 's/PIRATE //' ) + END_VERSIONS + """ +} diff --git a/modules/pirate/meta.yml b/modules/pirate/meta.yml new file mode 100644 index 00000000..296dd11d --- /dev/null +++ b/modules/pirate/meta.yml @@ -0,0 +1,47 @@ +name: pirate +description: Pangenome toolbox for bacterial genomes +keywords: + - gff + - pan-genome + - alignment +tools: + - pirate: + description: Pangenome analysis and threshold evaluation toolbox + homepage: https://github.com/SionBayliss/PIRATE + documentation: https://github.com/SionBayliss/PIRATE/wiki + tool_dev_url: https://github.com/SionBayliss/PIRATE + doi: "https://doi.org/10.1093/gigascience/giz119" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - gff: + type: file + description: A set of GFF3 formatted files + pattern: "*.{gff}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - results: + type: directory + description: Directory containing PIRATE result files + pattern: "*/*" + - aln: + type: file + description: Core-genome alignment produced by PIRATE (Optional) + pattern: "*.{fasta}" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 39a21981..ec3e8ed4 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -755,6 +755,10 @@ picard/sortsam: - modules/picard/sortsam/** - tests/modules/picard/sortsam/** +pirate: + - modules/pirate/** + - tests/modules/pirate/** + plasmidid: - modules/plasmidid/** - tests/modules/plasmidid/** diff --git a/tests/modules/pirate/main.nf b/tests/modules/pirate/main.nf new file mode 100644 index 00000000..5957b1e6 --- /dev/null +++ b/tests/modules/pirate/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PIRATE } from '../../../modules/pirate/main.nf' addParams( options: [:] ) + +workflow test_pirate { + + input = [ [ id:'test', single_end:false ], // meta map + [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), + file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), + file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + ] + + PIRATE ( input ) +} diff --git a/tests/modules/pirate/test.yml b/tests/modules/pirate/test.yml new file mode 100644 index 00000000..d8c4d0c4 --- /dev/null +++ b/tests/modules/pirate/test.yml @@ -0,0 +1,98 @@ +- name: pirate test_pirate + command: nextflow run tests/modules/pirate -entry test_pirate -c tests/config/nextflow.config + tags: + - pirate + files: + - path: output/pirate/results/PIRATE.gene_families.ordered.tsv + contains: ['allele_name'] + - path: output/pirate/results/PIRATE.gene_families.tsv + contains: ['allele_name'] + - path: output/pirate/results/PIRATE.genomes_per_allele.tsv + contains: ['g0197'] + - path: output/pirate/results/PIRATE.log + contains: ['PIRATE input options'] + - path: output/pirate/results/PIRATE.pangenome_summary.txt + md5sum: 4551c291bc06b21f984f25c09329ed7d + - path: output/pirate/results/PIRATE.unique_alleles.tsv + contains: ['allele_name'] + - path: output/pirate/results/binary_presence_absence.fasta + contains: ['GCF_000292685'] + - path: output/pirate/results/binary_presence_absence.nwk + md5sum: 5b5d86bf97d97de37bb9db514abb7762 + - path: output/pirate/results/cluster_alleles.tab + contains: ['g0001'] + - path: output/pirate/results/co-ords/GCF_000292685.co-ords.tab + md5sum: d5ca0f06ca7ea1f5486683d5859bc9b8 + - path: output/pirate/results/co-ords/GCF_000298385.co-ords.tab + md5sum: a24d6048b3074242bb558c7fa27a8b03 + - path: output/pirate/results/co-ords/GCF_002849995.co-ords.tab + md5sum: 0c08228585f4fa95686e9b025e0fe9c1 + - path: output/pirate/results/genome2loci.tab + md5sum: bbcea5bfcdcafe14a9aa7261c8e931b8 + - path: output/pirate/results/genome_list.txt + md5sum: 6534b1635c258ad92b829077addc1ff5 + - path: output/pirate/results/link_clusters.log + contains: ['parsing paralog file'] + - path: output/pirate/results/loci_list.tab + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/loci_paralog_categories.tab + md5sum: 6404d2a32526a398f42d7da768a389bd + - path: 
output/pirate/results/modified_gffs/GCF_000292685.gff + md5sum: 2b73bda2f84dc634303dc90e641040ca + - path: output/pirate/results/modified_gffs/GCF_000298385.gff + md5sum: b1a9d6557d47e09249f08a7acdbbd618 + - path: output/pirate/results/modified_gffs/GCF_002849995.gff + md5sum: 68532fc9bb639e6d83c731a069f60cf8 + - path: output/pirate/results/pan_sequences.fasta + md5sum: ed835c77fdb20c36aa9d5208eb7ca0cb + - path: output/pirate/results/pangenome.connected_blocks.tsv + contains: ['block_number'] + - path: output/pirate/results/pangenome.edges + contains: ['g0259'] + - path: output/pirate/results/pangenome.gfa + contains: ['g0001'] + - path: output/pirate/results/pangenome.order.tsv + contains: ['g0172'] + - path: output/pirate/results/pangenome.reversed.tsv + md5sum: b2396ce09a6e4178761eca6dc7f4434f + - path: output/pirate/results/pangenome.syntenic_blocks.tsv + contains: ['g0091'] + - path: output/pirate/results/pangenome.temp + - path: output/pirate/results/pangenome_iterations/pan_sequences.50.reclustered.reinflated + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.60.reclustered.reinflated + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.70.reclustered.reinflated + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.80.reclustered.reinflated + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.90.reclustered.reinflated + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.95.reclustered.reinflated + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.98.reclustered.reinflated + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.blast.output + md5sum: 9da25d27684bfcc5488987ab2d1fd3a1 + - path: output/pirate/results/pangenome_iterations/pan_sequences.cdhit_clusters + contains: ['GCF_000298385_00081'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.core_clusters.tab + contains: ['GCF_000298385_00242'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.mcl_log.txt + contains: ['chaos'] + - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta + md5sum: 84668b6c65b57026a17a50b0edd02541 + - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta.pdb + - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta.pot + - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta.ptf + - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta.pto + - path: output/pirate/results/pangenome_log.txt + contains: ['Creating pangenome on amino acid'] + - path: output/pirate/results/paralog_clusters.tab + contains: ['g0216'] + - path: output/pirate/results/representative_sequences.faa + contains: ['representative_genome'] + - path: output/pirate/results/representative_sequences.ffn + contains: ['representative_genome'] + - path: output/pirate/results/split_groups.log + contains: ['g0213'] From aa32a8a72eab0c0fb4c748678916be57126a4f61 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Tue, 5 Oct 2021 22:52:00 +0100 Subject: [PATCH 128/314] new module: gatk4/calculatecontamination (#778) * initiated files for calculate contamination * pushing 
local repo to remote * created script, filled in meta yml, created tests and test yml. local checks passing, needs repo side test data * added option and tests for outputting optional segmentation file * saving for test push * versions updated, test data added * Update main.nf * fixed versions info, should report correctly now * small update to main.nf outputs formatting * Apply suggestions from code review * Update test_data.config * Apply suggestions from code review Co-authored-by: GCJMackenzie Co-authored-by: Harshil Patel --- .../gatk4/calculatecontamination/functions.nf | 78 +++++++++++++++++++ modules/gatk4/calculatecontamination/main.nf | 47 +++++++++++ modules/gatk4/calculatecontamination/meta.yml | 53 +++++++++++++ tests/config/pytest_modules.yml | 4 + tests/config/test_data.config | 2 + .../gatk4/calculatecontamination/main.nf | 38 +++++++++ .../gatk4/calculatecontamination/test.yml | 28 +++++++ 7 files changed, 250 insertions(+) create mode 100644 modules/gatk4/calculatecontamination/functions.nf create mode 100644 modules/gatk4/calculatecontamination/main.nf create mode 100644 modules/gatk4/calculatecontamination/meta.yml create mode 100644 tests/modules/gatk4/calculatecontamination/main.nf create mode 100644 tests/modules/gatk4/calculatecontamination/test.yml diff --git a/modules/gatk4/calculatecontamination/functions.nf b/modules/gatk4/calculatecontamination/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/calculatecontamination/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf new file mode 100644 index 00000000..bfe9b8fd --- /dev/null +++ b/modules/gatk4/calculatecontamination/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_CALCULATECONTAMINATION { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(pileup), path(matched) + val segmentout + + output: + tuple val(meta), path('*.contamination.table') , emit: contamination + tuple val(meta), path('*.segmentation.table') , optional:true, emit: segmentation + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def matched_command = matched ? " -matched ${matched} " : '' + def segment_command = segmentout ? " -segments ${prefix}.segmentation.table" : '' + """ + gatk CalculateContamination \\ + -I $pileup \\ + $matched_command \\ + -O ${prefix}.contamination.table \\ + $segment_command \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/calculatecontamination/meta.yml b/modules/gatk4/calculatecontamination/meta.yml new file mode 100644 index 00000000..0d1b9b85 --- /dev/null +++ b/modules/gatk4/calculatecontamination/meta.yml @@ -0,0 +1,53 @@ +name: gatk4_calculatecontamination +description: | + Calculates the fraction of reads from cross-sample contamination based on summary tables from getpileupsummaries. Output to be used with filtermutectcalls. +keywords: + - gatk4 + - calculatecontamination + - cross-samplecontamination + - getpileupsummaries + - filtermutectcalls +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - pileup: + type: file + description: File containing the pileups summary table of a tumor sample to be used to calculate contamination. + pattern: "*.pileups.table" + - matched: + type: file + description: File containing the pileups summary table of a normal sample that matches with the tumor sample specified in pileup argument. This is an optional input. + pattern: "*.pileups.table" + - segmentout: + type: boolean + description: specifies whether to output the segmentation table. + +output: + - contamination: + type: file + description: File containing the contamination table. + pattern: "*.contamination.table" + - segmentation: + type: file + description: optional output table containing segmentation of tumor minor allele fractions. + pattern: "*.segmentation.table" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index ec3e8ed4..340ea28d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -386,6 +386,10 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** +gatk4/calculatecontamination: + - modules/gatk4/calculatecontamination/** + - tests/modules/gatk4/calculatecontamination/** + gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 8d5ecd92..8fcc3d0b 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -153,6 +153,8 @@ params { test_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test.baserecalibrator.table" test2_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.baserecalibrator.table" + test_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test.pileups.table" + test2_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.pileups.table" test_genome_vcf = "${test_data_dir}/genomics/homo_sapiens/illumina/gvcf/test.genome.vcf" test_genome_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gvcf/test.genome.vcf.gz" diff --git a/tests/modules/gatk4/calculatecontamination/main.nf b/tests/modules/gatk4/calculatecontamination/main.nf new file mode 100644 index 00000000..f93f66fb --- /dev/null +++ b/tests/modules/gatk4/calculatecontamination/main.nf @@ -0,0 +1,38 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' addParams( options: [:] ) + +workflow test_gatk4_calculatecontamination_tumor_only { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_pileups_table'], checkIfExists: true), + [] ] + + segmentout = false + + GATK4_CALCULATECONTAMINATION ( input, segmentout ) +} + +workflow test_gatk4_calculatecontamination_matched_pair { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_pileups_table'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_pileups_table'], checkIfExists: true) ] + + segmentout = false + + GATK4_CALCULATECONTAMINATION ( input, segmentout ) +} + +workflow test_gatk4_calculatecontamination_segmentation { + + input = [ [ id:'test' ], // meta map + 
file(params.test_data['homo_sapiens']['illumina']['test2_pileups_table'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_pileups_table'], checkIfExists: true) ] + + segmentout = true + + GATK4_CALCULATECONTAMINATION ( input, segmentout ) +} diff --git a/tests/modules/gatk4/calculatecontamination/test.yml b/tests/modules/gatk4/calculatecontamination/test.yml new file mode 100644 index 00000000..8736bc32 --- /dev/null +++ b/tests/modules/gatk4/calculatecontamination/test.yml @@ -0,0 +1,28 @@ +- name: gatk4 calculatecontamination test_gatk4_calculatecontamination_tumor_only + command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_tumor_only -c tests/config/nextflow.config + tags: + - gatk4/calculatecontamination + - gatk4 + files: + - path: output/gatk4/test.contamination.table + md5sum: ff348a26dd09404239a7ed0da7d98874 + +- name: gatk4 calculatecontamination test_gatk4_calculatecontamination_matched_pair + command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c tests/config/nextflow.config + tags: + - gatk4/calculatecontamination + - gatk4 + files: + - path: output/gatk4/test.contamination.table + md5sum: ff348a26dd09404239a7ed0da7d98874 + +- name: gatk4 calculatecontamination test_gatk4_calculatecontamination_segmentation + command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c tests/config/nextflow.config + tags: + - gatk4/calculatecontamination + - gatk4 + files: + - path: output/gatk4/test.contamination.table + md5sum: ff348a26dd09404239a7ed0da7d98874 + - path: output/gatk4/test.segmentation.table + md5sum: 478cb4f69ec001944b9cd0e7e4de01ef From bcf2681b037ba971984a64181b660b6591335f54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Tue, 5 Oct 2021 23:16:45 +0100 Subject: [PATCH 129/314] New module: `pbbam/pbmerge` (#752) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Add some pacbio test files * 📦 NEW: Add pbbam/pbmerge module * 🐛 FIX: Add optional arguments to command line * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update test data config and test script * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Add some pacbio test files * 📦 NEW: Add pbbam/pbmerge module * 🐛 FIX: Add optional arguments to command line * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update test data config and test script * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update and clean code * 🐛 FIX: Update module path in test * 🐛 FIX: Add missing () + correct module path in test * 👌 IMPROVE: Update pbmerge from version 1.6.0 to 1.7.0 * 👌 IMPROVE: Change output filename suffix for something more generic * 🐛 Update test.yml * Apply suggestions from code review * Update tests/modules/pbbam/pbmerge/test.yml * Update tests/modules/pbbam/pbmerge/main.nf Co-authored-by: Gregor Sturm Co-authored-by: Robert A. 
Petit III Co-authored-by: Harshil Patel --- modules/pbbam/pbmerge/functions.nf | 78 ++++++++++++++++++++++++++++ modules/pbbam/pbmerge/main.nf | 42 +++++++++++++++ modules/pbbam/pbmerge/meta.yml | 46 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/pbbam/pbmerge/main.nf | 18 +++++++ tests/modules/pbbam/pbmerge/test.yml | 10 ++++ 6 files changed, 198 insertions(+) create mode 100644 modules/pbbam/pbmerge/functions.nf create mode 100644 modules/pbbam/pbmerge/main.nf create mode 100644 modules/pbbam/pbmerge/meta.yml create mode 100644 tests/modules/pbbam/pbmerge/main.nf create mode 100644 tests/modules/pbbam/pbmerge/test.yml diff --git a/modules/pbbam/pbmerge/functions.nf b/modules/pbbam/pbmerge/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/pbbam/pbmerge/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/pbbam/pbmerge/main.nf b/modules/pbbam/pbmerge/main.nf new file mode 100644 index 00000000..63cd2ffe --- /dev/null +++ b/modules/pbbam/pbmerge/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PBBAM_PBMERGE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::pbbam=1.7.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/pbbam:1.7.0--h058f120_1" + } else { + container "quay.io/biocontainers/pbbam:1.7.0--h058f120_1" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path("*.pbi"), emit: pbi + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + pbmerge \\ + -o ${prefix}.bam \\ + $options.args \\ + *.bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + pbbam/pbmerge: \$( pbmerge --version|sed 's/pbmerge //' ) + END_VERSIONS + """ +} diff --git a/modules/pbbam/pbmerge/meta.yml b/modules/pbbam/pbmerge/meta.yml new file mode 100644 index 00000000..c483ca40 --- /dev/null +++ b/modules/pbbam/pbmerge/meta.yml @@ -0,0 +1,46 @@ +name: pbbam_pbmerge +description: The pbbam software package provides components to create, query, & edit PacBio BAM files and associated indices. These components include a core C++ library, bindings for additional languages, and command-line utilities. +keywords: + - pbbam + - pbbam/pbmerge +tools: + - pbbam: + description: PacBio BAM C++ library + homepage: https://github.com/PacificBiosciences/pbbioconda + documentation: https://pbbam.readthedocs.io/en/latest/tools/pbmerge.html + tool_dev_url: https://github.com/pacificbiosciences/pbbam/ + doi: "" + licence: ['BSD-3-clause-Clear'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM files to merge + pattern: "*.bam" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: The merged bam file + pattern: "*.bam" + - pbi: + type: file + description: BAM Pacbio index file + pattern: "*.bam.pbi" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 340ea28d..556f6483 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -731,6 +731,10 @@ pangolin: - modules/pangolin/** - tests/modules/pangolin/** +pbbam/pbmerge: + - modules/pbbam/pbmerge/** + - tests/modules/pbbam/pbmerge/** + pbccs: - modules/pbccs/** - tests/modules/pbccs/** diff --git a/tests/modules/pbbam/pbmerge/main.nf b/tests/modules/pbbam/pbmerge/main.nf new file mode 100644 index 00000000..9220af0c --- /dev/null +++ b/tests/modules/pbbam/pbmerge/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PBBAM_PBMERGE } from '../../../../modules/pbbam/pbmerge/main.nf' addParams( options: [suffix: '.merged'] ) + +workflow test_pbbam_pbmerge { + + input = [ + [ id:'test' ], // meta map + [ + file(params.test_data['homo_sapiens']['pacbio']['cluster'] , checkIfExists: true), + file(params.test_data['homo_sapiens']['pacbio']['singletons'], checkIfExists: true) + ] + ] + + PBBAM_PBMERGE ( input ) +} diff --git a/tests/modules/pbbam/pbmerge/test.yml b/tests/modules/pbbam/pbmerge/test.yml new file mode 100644 index 00000000..4f334c0e --- /dev/null +++ b/tests/modules/pbbam/pbmerge/test.yml @@ -0,0 +1,10 @@ +- name: pbbam pbmerge test_pbbam_pbmerge + command: nextflow run tests/modules/pbbam/pbmerge -entry test_pbbam_pbmerge -c tests/config/nextflow.config + tags: + - pbbam/pbmerge + - pbbam + files: + - path: output/pbbam/test.merged.bam + md5sum: 727c7ba1289192085c06890dda70f973 + - path: output/pbbam/test.merged.bam.pbi + md5sum: edfadd3a81c598d1ee051899792db75d From 84f2302920078b0cf7716b2a2e5fcc0be5c4531d Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Wed, 6 Oct 2021 08:16:36 +0200 Subject: [PATCH 130/314] Correct parsing versions with trailing zeros (#795) * Correct parsing versions with trailing zeros * Fix test * Update modules/custom/dumpsoftwareversions/main.nf Co-authored-by: Harshil Patel * Fix tests and go back to output versions.yml * Update tests/test_versions_yml.py to use BaseLoader Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- modules/custom/dumpsoftwareversions/main.nf | 2 +- tests/modules/custom/dumpsoftwareversions/main.nf | 4 ++-- tests/test_versions_yml.py | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/modules/custom/dumpsoftwareversions/main.nf b/modules/custom/dumpsoftwareversions/main.nf index cf10a8e0..faf2073f 100644 --- a/modules/custom/dumpsoftwareversions/main.nf +++ b/modules/custom/dumpsoftwareversions/main.nf @@ -79,7 +79,7 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { } with open("$versions") as f: - workflow_versions = yaml.safe_load(f) | module_versions + workflow_versions = yaml.load(f, Loader=yaml.BaseLoader) | module_versions workflow_versions["Workflow"] = { "Nextflow": "$workflow.nextflow.version", diff --git a/tests/modules/custom/dumpsoftwareversions/main.nf b/tests/modules/custom/dumpsoftwareversions/main.nf index 94dbc5fb..020b19bd 100644 --- a/tests/modules/custom/dumpsoftwareversions/main.nf +++ b/tests/modules/custom/dumpsoftwareversions/main.nf @@ -17,8 +17,8 @@ workflow test_custom_dumpsoftwareversions { MULTIQC ( 
FASTQC.out.zip.collect { it[1] } ) ch_software_versions = Channel.empty() - ch_software_versions = ch_software_versions.mix(FASTQC.out.version) - ch_software_versions = ch_software_versions.mix(MULTIQC.out.version) + ch_software_versions = ch_software_versions.mix(FASTQC.out.versions) + ch_software_versions = ch_software_versions.mix(MULTIQC.out.versions) CUSTOM_DUMPSOFTWAREVERSIONS ( ch_software_versions.collectFile() ) } diff --git a/tests/test_versions_yml.py b/tests/test_versions_yml.py index b6392b87..759fc4d5 100644 --- a/tests/test_versions_yml.py +++ b/tests/test_versions_yml.py @@ -13,7 +13,8 @@ def _get_workflow_names(): here = Path(__file__).parent.resolve() pytest_workflow_files = here.glob("**/test.yml") for f in pytest_workflow_files: - test_config = yaml.safe_load(f.read_text()) + # test_config = yaml.safe_load(f.read_text()) + test_config = yaml.load(f.read_text(), Loader=yaml.BaseLoader) for workflow in test_config: yield workflow["name"] From 79d38a306bdaf07000e0d6f300684d3ed38c8919 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Wed, 6 Oct 2021 11:13:48 +0100 Subject: [PATCH 131/314] New module: `isoseq3/refine` (#748) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: Add isoseq3/refine module * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add parallelization * 🐛 FIX: Correct Typo * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The module accept one channel (primers moved into the first channel) * 👌 IMPROVE: Assign a value channel to primers input Improve workflow code readability * 👌 IMPROVE: Update to the version of templates * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Update test file * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Fill contains args * 📦 NEW: Add isoseq3/refine module * 👌 IMPROVE: Add parallelization * 🐛 FIX: Correct Typo * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The module accept one channel (primers moved into the first channel) * 👌 IMPROVE: Assign a value channel to primers input Improve workflow code readability * 👌 IMPROVE: Update to the version of templates * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Update test file * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Fill contains args * 👌 IMPROVE: Add one channel per output file * 👌 IMPROVE: Minor updates * 👌 IMPROVE: Minors Update - Remove TODO from test.yml - Remove useless piece of code * 📦 NEW: Add isoseq3/refine module * 👌 IMPROVE: Add parallelization * 🐛 FIX: Correct Typo * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The module accept one channel (primers moved into the first channel) * 👌 IMPROVE: Assign a value channel to primers input Improve workflow code readability * 👌 IMPROVE: Update to the version of templates * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Update test file * 👌 IMPROVE: 
Fill contains args * 📦 NEW: Add isoseq3/refine module * 👌 IMPROVE: Add parallelization * 🐛 FIX: Correct Typo * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The module accept one channel (primers moved into the first channel) * 👌 IMPROVE: Assign a value channel to primers input Improve workflow code readability * 👌 IMPROVE: Update to the version of templates * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Update test file * 👌 IMPROVE: Add one channel per output file * 👌 IMPROVE: Minor updates * 👌 IMPROVE: Minors Update - Remove TODO from test.yml - Remove useless piece of code * 🐛 FIX: Remove unwanted files * 🐛 FIX: Protect \ * 🐛 FIX: Remove test files * Apply suggestions from code review * Apply suggestions from code review * Update tests/modules/isoseq3/refine/test.yml Co-authored-by: Gregor Sturm Co-authored-by: Harshil Patel --- modules/isoseq3/refine/functions.nf | 78 +++++++++++++++++++++++++++ modules/isoseq3/refine/main.nf | 49 +++++++++++++++++ modules/isoseq3/refine/meta.yml | 62 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/isoseq3/refine/main.nf | 16 ++++++ tests/modules/isoseq3/refine/test.yml | 16 ++++++ 6 files changed, 225 insertions(+) create mode 100644 modules/isoseq3/refine/functions.nf create mode 100644 modules/isoseq3/refine/main.nf create mode 100644 modules/isoseq3/refine/meta.yml create mode 100644 tests/modules/isoseq3/refine/main.nf create mode 100644 tests/modules/isoseq3/refine/test.yml diff --git a/modules/isoseq3/refine/functions.nf b/modules/isoseq3/refine/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/isoseq3/refine/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = 
ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/isoseq3/refine/main.nf b/modules/isoseq3/refine/main.nf new file mode 100644 index 00000000..5a45eb2d --- /dev/null +++ b/modules/isoseq3/refine/main.nf @@ -0,0 +1,49 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ISOSEQ3_REFINE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::isoseq3=3.4.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0" + } else { + container "quay.io/biocontainers/isoseq3:3.4.0--0" + } + + input: + tuple val(meta), path(bam) + path primers + + output: + tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*.bam.pbi") , emit: pbi + tuple val(meta), path("*.consensusreadset.xml"), emit: consensusreadset + tuple val(meta), path("*.filter_summary.json") , emit: summary + tuple val(meta), path("*.report.csv") , emit: report + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + isoseq3 \\ + refine \\ + -j $task.cpus \\ + $options.args \\ + $bam \\ + $primers \\ + ${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( isoseq3 refine --version|sed 's/isoseq refine //'|sed 's/ (commit.\\+//' ) + END_VERSIONS + """ +} diff --git a/modules/isoseq3/refine/meta.yml b/modules/isoseq3/refine/meta.yml new file mode 100644 index 00000000..81b57c7c --- /dev/null +++ b/modules/isoseq3/refine/meta.yml @@ -0,0 +1,62 @@ +name: isoseq3_refine +description: Generate transcripts by clustering HiFi reads +keywords: + - isoseq3 + - isoseq3/refine +tools: + - isoseq3: + description: IsoSeq3 - Scalable De Novo Isoform Discovery + homepage: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md + documentation: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md + tool_dev_url: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md + doi: "" + licence: ['BSD-3-clause-Clear'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test ] + - bam: + type: file + description: BAM file, cleaned ccs generated by lima + pattern: "*.bam" + - primers: + type: file + description: fasta file of primers + pattern: "*.fasta" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - bam: + type: file + description: Set of complete reads (with polyA tail), where the polyA has been trimmed + pattern: "*.bam" + - pbi: + type: file + description: Pacbio index file from polyA trimmed reads + pattern: "*.pbi" + - consensusreadset: + type: file + description: Metadata about read library + pattern: "*.xml" + - summary: + type: file + description: json file describing number of full length reads, full length non chimeric reads and full length non chimeric polyA reads + pattern: "*.json" + - report: + type: file + description: Metadata about primer and polyA detection (primers/polyA/insert length, strand, primer name) + pattern: "*.csv" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 556f6483..bf280360 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -525,6 +525,10 @@ iqtree: - modules/iqtree/** - tests/modules/iqtree/** +isoseq3/refine: + - modules/isoseq3/refine/** + - tests/modules/isoseq3/refine/** + ismapper: - modules/ismapper/** - tests/modules/ismapper/** diff --git a/tests/modules/isoseq3/refine/main.nf b/tests/modules/isoseq3/refine/main.nf new file mode 100644 index 00000000..13736604 --- /dev/null +++ b/tests/modules/isoseq3/refine/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ISOSEQ3_REFINE } from '../../../../modules/isoseq3/refine/main' addParams( options: [suffix:'.refine'] ) + +workflow test_isoseq3_refine { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['lima'], checkIfExists: true), + ] + primers = file(params.test_data['homo_sapiens']['pacbio']['primers'], checkIfExists: true) + + ISOSEQ3_REFINE ( input, primers ) +} diff --git a/tests/modules/isoseq3/refine/test.yml b/tests/modules/isoseq3/refine/test.yml new file mode 100644 index 00000000..2e7782d3 --- /dev/null +++ b/tests/modules/isoseq3/refine/test.yml @@ -0,0 +1,16 @@ +- name: isoseq3 refine test_isoseq3_refine + command: nextflow run tests/modules/isoseq3/refine -entry test_isoseq3_refine -c tests/config/nextflow.config + tags: + - isoseq3 + - isoseq3/refine + files: + - path: output/isoseq3/test.refine.bam + md5sum: e8387afd5f66a7f6a89f90a0dcf3b823 + - path: output/isoseq3/test.refine.bam.pbi + md5sum: 8097cad9e472f2f79de6de5fe3dcc822 + - path: output/isoseq3/test.refine.consensusreadset.xml + contains: [ 'pbds:ConsensusReadSet' ] + - path: output/isoseq3/test.refine.filter_summary.json + md5sum: 87f8bdd5c60741f47b8a991e002f7ef3 + - path: output/isoseq3/test.refine.report.csv + md5sum: d42a139e5d9b08396bdb087c01243ea9 From 5a49d2c1bfdaa1204b83b172c3ca95cf972402b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Thu, 7 Oct 2021 10:06:02 +0100 Subject: [PATCH 132/314] New module: `isoseq3/cluster` (#801) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: Add isoseq3/cluster module * 🐛FIX: Fix reports channel and add .pbi to it * 🐛FIX: Fix report channel definition * 👌IMPROVE: Move .pbi file into reports channel * 👌IMPROVE: remove 
--use_qvs option from command line * 👌 IMPROVE: Add in addParams removed options from command line * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The module accept one channel (primers moved into the first channel) * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: Ignore test data * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: add singletons parameter and improve outputs * 🐛 FIX: Update test with last module model * 👌 IMPROVE: Add test tag * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update test data config * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Remove unused index * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Fill contains args * 📦 NEW: Add isoseq3/cluster module * 🐛FIX: Fix reports channel and add .pbi to it * 🐛FIX: Fix report channel definition * 👌IMPROVE: Move .pbi file into reports channel * 👌IMPROVE: remove --use_qvs option from command line * 👌 IMPROVE: Add in addParams removed options from command line * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The module accept one channel (primers moved into the first channel) * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: add singletons parameter and improve outputs * 🐛 FIX: Update test with last module model * 👌 IMPROVE: Add test tag * 👌 IMPROVE: Update test data config * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Remove unused index * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Fill contains args * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: Ignore test data * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update code to new versions capture + better output channels * 👌 IMPROVE: Update with new versions.yml file * 🐛 FIX: Update meta.yml + correct typos * 👌 IMPROVE: Clean output file names + correct typo * 🐛 FIX: Remove bamtools/split module from isoseq3/cluster --- .gitignore | 4 +- modules/isoseq3/cluster/functions.nf | 78 +++++++++++++++++++++++++ modules/isoseq3/cluster/main.nf | 53 +++++++++++++++++ modules/isoseq3/cluster/meta.yml | 81 ++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/isoseq3/cluster/main.nf | 15 +++++ tests/modules/isoseq3/cluster/test.yml | 28 +++++++++ 7 files changed, 260 insertions(+), 3 deletions(-) create mode 100644 modules/isoseq3/cluster/functions.nf create mode 100644 modules/isoseq3/cluster/main.nf create mode 100644 modules/isoseq3/cluster/meta.yml create mode 100644 tests/modules/isoseq3/cluster/main.nf create mode 100644 
tests/modules/isoseq3/cluster/test.yml diff --git a/.gitignore b/.gitignore index 06eae014..c773a2d0 100644 --- a/.gitignore +++ b/.gitignore @@ -5,11 +5,9 @@ test_output/ output/ .DS_Store *.code-workspace +tests/data/ .screenrc .*.sw? __pycache__ *.pyo *.pyc -tests/data/ -modules/modtest/ -tests/modules/modtest/ diff --git a/modules/isoseq3/cluster/functions.nf b/modules/isoseq3/cluster/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/isoseq3/cluster/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/isoseq3/cluster/main.nf b/modules/isoseq3/cluster/main.nf new file mode 100644 index 00000000..f01af2bc --- /dev/null +++ b/modules/isoseq3/cluster/main.nf @@ -0,0 +1,53 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ISOSEQ3_CLUSTER { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::isoseq3=3.4.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0" + } else { + container "quay.io/biocontainers/isoseq3:3.4.0--0" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*.bam.pbi") , emit: pbi + tuple val(meta), path("*.cluster") , emit: cluster + tuple val(meta), path("*.cluster_report.csv"), emit: cluster_report + tuple val(meta), path("*.transcriptset.xml") , emit: transcriptset + tuple val(meta), path("*.hq.bam") , emit: hq_bam + tuple val(meta), path("*.hq.bam.pbi") , emit: hq_pbi + tuple val(meta), path("*.lq.bam") , emit: lq_bam + tuple val(meta), path("*.lq.bam.pbi") , emit: lq_pbi + path "versions.yml" , emit: versions + + tuple val(meta), path("*.singletons.bam") , optional: true, emit: singletons_bam + tuple val(meta), path("*.singletons.bam.pbi"), optional: true, emit: singletons_pbi + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + isoseq3 \\ + cluster \\ + $bam \\ + ${prefix}.bam \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + isoseq3 cluster: \$( isoseq3 cluster --version|sed 's/isoseq cluster //g'|sed 's/ (.*//g' ) + END_VERSIONS + """ +} diff --git a/modules/isoseq3/cluster/meta.yml b/modules/isoseq3/cluster/meta.yml new file mode 100644 index 00000000..6fadb9c4 --- /dev/null +++ b/modules/isoseq3/cluster/meta.yml @@ -0,0 +1,81 @@ +name: isoseq3_cluster +description: IsoSeq3 - Cluster - Cluster trimmed consensus sequences +keywords: + - cluster +tools: + - isoseq3: + description: IsoSeq3 - Cluster - Cluster trimmed consensus sequences + homepage: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md + documentation: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md + tool_dev_url: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md + doi: "" + licence: ['BSD-3-clause-Clear'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - bam: + type: file + description: BAM file generated by isoseq3 refine + pattern: "*.bam" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - bam: + type: file + description: BAM file of clustered consensus + pattern: "*.bam" + - pbi: + type: file + description: Pacbio Index of consensus reads generated by clustering + pattern: "*.pbi" + - cluster: + type: file + description: A two columns (from, to) file describing original read name to new read name + pattern: "*.cluster" + - cluster_report: + type: file + description: A table files clusters (transcripts) members (read) + pattern: "*.cluster_report.csv" + - transcriptset: + type: file + description: A metadata xml file which contains full paths to data files + pattern: "*.clustered.transcriptset.xml" + - hq_bam: + type: file + description: High quality reads + pattern: "*.hq.bam" + - hq_pbi: + type: file + description: Pacbio index of high quality reads + pattern: "*.hq.bam.pbi" + - lq_bam: + type: file + description: Low quality reads + pattern: "*.lq.bam" + - lq_pbi: + type: file + description: Pacbio index of low quality reads + pattern: "*.lq.bam.pbi" + - singletons_bam: + type: file + description: Unclustered reads + pattern: "*.singletons.bam" + - singletons_pbi: + type: file + description: Pacbio index of unclustered reads + pattern: "*.singletons.bam.pbi" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index bf280360..6f0e491f 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -525,6 +525,10 @@ iqtree: - modules/iqtree/** - tests/modules/iqtree/** +isoseq3/cluster: + - modules/isoseq3/cluster/** + - tests/modules/isoseq3/cluster/** + isoseq3/refine: - modules/isoseq3/refine/** - tests/modules/isoseq3/refine/** diff --git a/tests/modules/isoseq3/cluster/main.nf b/tests/modules/isoseq3/cluster/main.nf new file mode 100644 index 00000000..90a24c11 --- /dev/null +++ b/tests/modules/isoseq3/cluster/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ISOSEQ3_CLUSTER } from '../../../../modules/isoseq3/cluster/main.nf' addParams( options: [args: '--singletons --use-qvs --verbose'] ) + +workflow test_isoseq3_cluster { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['refine'], checkIfExists: true), + ] + + ISOSEQ3_CLUSTER ( input ) +} diff --git a/tests/modules/isoseq3/cluster/test.yml b/tests/modules/isoseq3/cluster/test.yml new file mode 100644 index 00000000..cc6b6dac --- /dev/null +++ b/tests/modules/isoseq3/cluster/test.yml @@ -0,0 +1,28 @@ +- name: isoseq3 cluster test_isoseq3_cluster + command: nextflow run tests/modules/isoseq3/cluster -entry test_isoseq3_cluster -c tests/config/nextflow.config + tags: + - isoseq3 + - isoseq3/cluster + files: + - path: output/isoseq3/test.bam + md5sum: ca8277f4d8fe1bba68ba266c42b46dd1 + - path: output/isoseq3/test.bam.pbi + md5sum: cbc06657b4543faba7ff886b3b12b862 + - path: output/isoseq3/test.cluster + md5sum: d5059d856763fc5591332980bfc0d57b + - path: output/isoseq3/test.cluster_report.csv + md5sum: 342d97dc10aedf80a45977edcb491c62 + - path: output/isoseq3/test.hq.bam + md5sum: e93ea85776c35c246364d954032c2ea9 + - path: output/isoseq3/test.hq.bam.pbi + md5sum: 5a8ea7668e8f8e173478b28cbb6ab515 + - path: output/isoseq3/test.lq.bam + md5sum: 4ea0e4f4a6cc689dcc275adcdf688fad + - path: output/isoseq3/test.lq.bam.pbi + md5sum: f5edc24711b2c8d6474d60cb69022af0 + - path: output/isoseq3/test.singletons.bam + md5sum: 
73d131920bd42e1fc5fca2e6cb71f4b2 + - path: output/isoseq3/test.singletons.bam.pbi + md5sum: 73980863be4b5bda2846325c737f0b5e + - path: output/isoseq3/test.transcriptset.xml + contains: [ 'PacBio.DataSet.TranscriptSet' ] From f479d4fb8d634b5ac5bd1c879917dc285abc076d Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Thu, 7 Oct 2021 13:55:06 +0100 Subject: [PATCH 133/314] Learnreadorientationmodel (#794) * files for learnreadorientationmodel initialised for first commit * finished scripts and yml files. test working locally but needs an f1r2 test data on nf-core before it can be submitted * updated test data location * versions file updated, test data added * updated versions file, edited test file * small formatting update to main.nf * Update main.nf * Update test_data.config * updated tests main.nf * Update test_data.config * Apply suggestions from code review * Update modules/gatk4/learnreadorientationmodel/main.nf * Update modules/gatk4/learnreadorientationmodel/meta.yml * fixed tests failing Co-authored-by: GCJMackenzie Co-authored-by: Harshil Patel --- .../learnreadorientationmodel/functions.nf | 78 +++++++++++++++++++ .../gatk4/learnreadorientationmodel/main.nf | 44 +++++++++++ .../gatk4/learnreadorientationmodel/meta.yml | 41 ++++++++++ tests/config/pytest_modules.yml | 4 + tests/config/test_data.config | 8 ++ .../gatk4/learnreadorientationmodel/main.nf | 13 ++++ .../gatk4/learnreadorientationmodel/test.yml | 7 ++ 7 files changed, 195 insertions(+) create mode 100644 modules/gatk4/learnreadorientationmodel/functions.nf create mode 100644 modules/gatk4/learnreadorientationmodel/main.nf create mode 100644 modules/gatk4/learnreadorientationmodel/meta.yml create mode 100644 tests/modules/gatk4/learnreadorientationmodel/main.nf create mode 100644 tests/modules/gatk4/learnreadorientationmodel/test.yml diff --git a/modules/gatk4/learnreadorientationmodel/functions.nf b/modules/gatk4/learnreadorientationmodel/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/learnreadorientationmodel/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] 
+ + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf new file mode 100644 index 00000000..0a499def --- /dev/null +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -0,0 +1,44 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_LEARNREADORIENTATIONMODEL { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(f1r2) + + output: + tuple val(meta), path("*.tar.gz"), emit: artifactprior + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def inputs_list = [] + f1r2.each() { a -> inputs_list.add(" -I " + a) } + """ + gatk \\ + LearnReadOrientationModel \\ + ${inputs_list.join(' ')} \\ + -O ${prefix}.tar.gz \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/learnreadorientationmodel/meta.yml b/modules/gatk4/learnreadorientationmodel/meta.yml new file mode 100644 index 00000000..c15b48cb --- /dev/null +++ b/modules/gatk4/learnreadorientationmodel/meta.yml @@ -0,0 +1,41 @@ +name: gatk4_learnreadorientationmodel +description: | + Uses f1r2 counts collected during mutect2 to Learn the prior probability of read orientation artifacts +keywords: + - gatk4 + - learnreadorientationmodel + - readorientationartifacts + - mutect2 +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. 
+ homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - f1r2: + type: list + description: list of f1r2 files to be used as input. + pattern: "*.f1r2.tar.gz" + +output: + - artifactprior: + type: file + description: file containing artifact-priors to be used by filtermutectcalls + pattern: "*.tar.gz" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6f0e491f..45a4d62c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -410,6 +410,10 @@ gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** +gatk4/learnreadorientationmodel: + - modules/gatk4/learnreadorientationmodel/** + - tests/modules/gatk4/learnreadorientationmodel/** + gatk4/markduplicates: - modules/gatk4/markduplicates/** - tests/modules/gatk4/markduplicates/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 8fcc3d0b..30e6f1ea 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -155,6 +155,14 @@ params { test2_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.baserecalibrator.table" test_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test.pileups.table" test2_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.pileups.table" + + test_test2_paired_mutect2_calls_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz" + test_test2_paired_mutect2_calls_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.tbi" + test_test2_paired_mutect2_calls_vcf_gz_stats = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.stats" + test_test2_paired_mutect2_calls_f1r2_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.f1r2.tar.gz" + test_test2_paired_mutect2_calls_artifact_prior_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_test2_paired_mutect2_calls.artifact-prior.tar.gz" + test_test2_paired_segmentation_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_test2_paired.segmentation.table" + test_test2_paired_contamination_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_test2_paired.contamination.table" test_genome_vcf = "${test_data_dir}/genomics/homo_sapiens/illumina/gvcf/test.genome.vcf" test_genome_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gvcf/test.genome.vcf.gz" diff --git a/tests/modules/gatk4/learnreadorientationmodel/main.nf b/tests/modules/gatk4/learnreadorientationmodel/main.nf new file mode 100644 index 00000000..1a71873e --- /dev/null +++ b/tests/modules/gatk4/learnreadorientationmodel/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_LEARNREADORIENTATIONMODEL } from '../../../../modules/gatk4/learnreadorientationmodel/main.nf' addParams( options: [suffix:'.artifact-prior'] ) + +workflow test_gatk4_learnreadorientationmodel { + + input = [ [ 
id:'test' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_f1r2_tar_gz'], checkIfExists: true)] ] + + GATK4_LEARNREADORIENTATIONMODEL ( input ) +} diff --git a/tests/modules/gatk4/learnreadorientationmodel/test.yml b/tests/modules/gatk4/learnreadorientationmodel/test.yml new file mode 100644 index 00000000..6e999fa6 --- /dev/null +++ b/tests/modules/gatk4/learnreadorientationmodel/test.yml @@ -0,0 +1,7 @@ +- name: gatk4 learnreadorientationmodel test_gatk4_learnreadorientationmodel + command: nextflow run tests/modules/gatk4/learnreadorientationmodel -entry test_gatk4_learnreadorientationmodel -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/learnreadorientationmodel + files: + - path: output/gatk4/test.artifact-prior.tar.gz From c19671dca974354978c9bc1711fca6fe681bdb0b Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Fri, 8 Oct 2021 15:02:42 +0000 Subject: [PATCH 134/314] Subworkflow Infrastructure (#662) * feat(subworkflows): Add align_bowtie2 subworkflow For testing CI setup * test(align_bowtie2): Add initial list of changes to test * test(align_bowtie2): Add initial test * refactor: Use tags to run subworkflows ci For every underlying module used by workflow and allow the modules pytest-modules definition be the source of truth. * refactor: Use individual directories for subworkflows * docs(align_bowtie2): Add initial meta.yml Copied most of it from the bowtie2/align module. * fix(align_bowtie2): Fix module include paths * test(bam_sort_samtools): Add initial test * ci(bam_sort_samtools): Add modules that trigger the tag * test(bam_stats_samtools): Add initial test * ci(bam_stats_samtools): Add keys to pick up changes * docs(bam_samtools): Add initial meta.yml * test(align_bowtie2): Fix path to subworkflow * test(align_bowtie2): Update entry point * fix(bam_sort_samtools): Update include paths * test(bam_sort_samtools): Fix path * style: Clean up addParams * test(samtools_sort): Add suffix for test * test(align_bowtie2): Add samtools_options for suffix * test(bam_stats_samtools): Update path * test(bam_stats_samtools): Use stats input Otherwise it's just an example of how it's used in the bam_sort_samtools subworkflow * ci(linting): Skip module linting of subworkflows * ci(linting): Clean up startsWith statement * test(bam_stats_samtools): Use single end test data for single end test * test(bam_stats_samtools): Add expected files * test(align_bowtie2): Add paired-end test * test(align_bowtie2): Sort order of output * test(align_bowtie2): Update hashes * docs(align_bowtie2): Fix typo * test(align_bowtie2): Update samtools output names * test(align_bowtie2): Remove md5sums for bam/bai * feat(subworkflows): Add nextflow.configs These can be used for default settings in the future. They can then be included in the conf/modules.config so that the params don't have to be duplicated in the root nextflow.config. * docs(subworkflows): Include modules instead of tools * fix: Update to versions * chore(align_bowtie2): Remove duplicate tag * style: Format yamls * test(subworkflows): Only check versions for modules * chore: Update subworkflows to match rnaseq dev * fix(subworkflows): Update paths * fix(bam_sort_samtools): Fix sort parameters for testing * Apply suggestions from code review Co-authored-by: Harshil Patel * docs: Update TODOs with a message * ci: Try using a matrix for strategy * ci: Try passing an array * Revert "ci: Try passing an array" This reverts commit d3611fcd8332bbb9a8501e8dd299d0a623aaecaa. 
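A minimal sketch of how a pipeline might consume the ALIGN_BOWTIE2 subworkflow introduced in this patch, adapted from the tests/subworkflows/nf-core/align_bowtie2/main.nf test workflow further down in the same patch; the include paths, the placeholder FASTQ/FASTA file names and the surrounding configuration (publishing, conda and container params such as those supplied by tests/config/nextflow.config) are illustrative assumptions, not part of the patch series:

#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

// Assumed include paths -- adjust to wherever modules/ and subworkflows/ live in the pipeline
include { BOWTIE2_BUILD } from './modules/bowtie2/build/main' addParams( options: [:] )
include { ALIGN_BOWTIE2 } from './subworkflows/nf-core/align_bowtie2/main' addParams( 'samtools_sort_options': ['suffix': '.sorted'] )

workflow {
    // Placeholder paired-end sample: [ meta map, [ fastq files ] ]
    input = [ [ id:'sample1', single_end:false ],
              [ file('sample1_R1.fastq.gz'), file('sample1_R2.fastq.gz') ] ]
    fasta = file('genome.fasta')

    BOWTIE2_BUILD ( fasta )                          // build the bowtie2 index once
    ALIGN_BOWTIE2 ( input, BOWTIE2_BUILD.out.index ) // align, then samtools sort/index/stats
}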
Co-authored-by: Harshil Patel --- .github/workflows/nf-core-linting.yml | 2 + .github/workflows/pytest-workflow.yml | 7 +- subworkflows/nf-core/align_bowtie2/main.nf | 47 +++++++++++ subworkflows/nf-core/align_bowtie2/meta.yml | 50 ++++++++++++ .../nf-core/align_bowtie2/nextflow.config | 2 + .../nf-core/bam_sort_samtools/main.nf | 53 ++++++++++++ .../nf-core/bam_sort_samtools/meta.yml | 41 ++++++++++ .../nf-core/bam_sort_samtools/nextflow.config | 1 + .../nf-core/bam_stats_samtools/main.nf | 33 ++++++++ .../nf-core/bam_stats_samtools/meta.yml | 43 ++++++++++ .../bam_stats_samtools/nextflow.config | 1 + tests/config/pytest_subworkflows.yml | 11 +++ tests/modules/samtools/sort/main.nf | 2 +- tests/modules/samtools/sort/test.yml | 4 +- .../nf-core/align_bowtie2/main.nf | 27 +++++++ .../nf-core/align_bowtie2/test.yml | 81 +++++++++++++++++++ .../nf-core/bam_sort_samtools/main.nf | 21 +++++ .../nf-core/bam_sort_samtools/test.yml | 47 +++++++++++ .../nf-core/bam_stats_samtools/main.nf | 23 ++++++ .../nf-core/bam_stats_samtools/test.yml | 31 +++++++ tests/test_versions_yml.py | 2 +- 21 files changed, 524 insertions(+), 5 deletions(-) create mode 100644 subworkflows/nf-core/align_bowtie2/main.nf create mode 100644 subworkflows/nf-core/align_bowtie2/meta.yml create mode 100644 subworkflows/nf-core/align_bowtie2/nextflow.config create mode 100644 subworkflows/nf-core/bam_sort_samtools/main.nf create mode 100644 subworkflows/nf-core/bam_sort_samtools/meta.yml create mode 100644 subworkflows/nf-core/bam_sort_samtools/nextflow.config create mode 100644 subworkflows/nf-core/bam_stats_samtools/main.nf create mode 100644 subworkflows/nf-core/bam_stats_samtools/meta.yml create mode 100644 subworkflows/nf-core/bam_stats_samtools/nextflow.config create mode 100644 tests/config/pytest_subworkflows.yml create mode 100644 tests/subworkflows/nf-core/align_bowtie2/main.nf create mode 100644 tests/subworkflows/nf-core/align_bowtie2/test.yml create mode 100644 tests/subworkflows/nf-core/bam_sort_samtools/main.nf create mode 100644 tests/subworkflows/nf-core/bam_sort_samtools/test.yml create mode 100644 tests/subworkflows/nf-core/bam_stats_samtools/main.nf create mode 100644 tests/subworkflows/nf-core/bam_stats_samtools/test.yml diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml index 263b36b3..55b8c296 100644 --- a/.github/workflows/nf-core-linting.yml +++ b/.github/workflows/nf-core-linting.yml @@ -71,6 +71,8 @@ jobs: - name: Lint ${{ matrix.tags }} run: nf-core modules lint ${{ matrix.tags }} + # HACK + if: startsWith( matrix.tags, 'subworkflow' ) != true - uses: actions/cache@v2 with: diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 7cbb2689..6f395409 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -9,6 +9,11 @@ jobs: changes: name: Check for changes runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + filter: + ["tests/config/pytest_modules.yml", "tests/config/pytest_subworkflows.yml"] outputs: # Expose matched filters as job 'modules' output variable modules: ${{ steps.filter.outputs.changes }} @@ -18,7 +23,7 @@ jobs: - uses: dorny/paths-filter@v2 id: filter with: - filters: "tests/config/pytest_modules.yml" + filters: ${{ matrix.filter }} test: runs-on: ubuntu-20.04 diff --git a/subworkflows/nf-core/align_bowtie2/main.nf b/subworkflows/nf-core/align_bowtie2/main.nf new file mode 100644 index 00000000..ec453f8d --- /dev/null +++ 
b/subworkflows/nf-core/align_bowtie2/main.nf @@ -0,0 +1,47 @@ +// +// Alignment with Bowtie2 +// + +params.align_options = [:] +params.samtools_sort_options = [:] +params.samtools_index_options = [:] +params.samtools_stats_options = [:] + +include { BOWTIE2_ALIGN } from '../../../modules/bowtie2/align/main' addParams( options: params.align_options ) +include { BAM_SORT_SAMTOOLS } from '../bam_sort_samtools/main' addParams( sort_options: params.samtools_sort_options, index_options: params.samtools_index_options, stats_options: params.samtools_stats_options ) + +workflow ALIGN_BOWTIE2 { + take: + reads // channel: [ val(meta), [ reads ] ] + index // channel: /path/to/bowtie2/index/ + + main: + + ch_versions = Channel.empty() + + // + // Map reads with Bowtie2 + // + BOWTIE2_ALIGN ( reads, index ) + ch_versions = ch_versions.mix(BOWTIE2_ALIGN.out.versions.first()) + + // + // Sort, index BAM file and run samtools stats, flagstat and idxstats + // + BAM_SORT_SAMTOOLS ( BOWTIE2_ALIGN.out.bam ) + ch_versions = ch_versions.mix(BAM_SORT_SAMTOOLS.out.versions) + + emit: + bam_orig = BOWTIE2_ALIGN.out.bam // channel: [ val(meta), bam ] + log_out = BOWTIE2_ALIGN.out.log // channel: [ val(meta), log ] + fastq = BOWTIE2_ALIGN.out.fastq // channel: [ val(meta), fastq ] + + bam = BAM_SORT_SAMTOOLS.out.bam // channel: [ val(meta), [ bam ] ] + bai = BAM_SORT_SAMTOOLS.out.bai // channel: [ val(meta), [ bai ] ] + csi = BAM_SORT_SAMTOOLS.out.csi // channel: [ val(meta), [ csi ] ] + stats = BAM_SORT_SAMTOOLS.out.stats // channel: [ val(meta), [ stats ] ] + flagstat = BAM_SORT_SAMTOOLS.out.flagstat // channel: [ val(meta), [ flagstat ] ] + idxstats = BAM_SORT_SAMTOOLS.out.idxstats // channel: [ val(meta), [ idxstats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/align_bowtie2/meta.yml b/subworkflows/nf-core/align_bowtie2/meta.yml new file mode 100644 index 00000000..e149a212 --- /dev/null +++ b/subworkflows/nf-core/align_bowtie2/meta.yml @@ -0,0 +1,50 @@ +name: align_bowtie2 +description: Align reads to a reference genome using bowtie2 then sort with samtools +keywords: + - align + - fasta + - genome + - reference +modules: + - bowtie2/align + - samtools/sort + - samtools/index + - samtools/stats + - samtools/idxstats + - samtools/flagstat +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. 
+ - index: + type: file + description: Bowtie2 genome index files + pattern: '*.ebwt' +# TODO Update when we decide on a standard for subworkflow docs +output: + - bam: + type: file + description: Output BAM file containing read alignments + pattern: '*.{bam}' + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - fastq: + type: file + description: Unaligned FastQ files + pattern: '*.fastq.gz' + - log: + type: file + description: Alignment log + pattern: '*.log' + # TODO Add samtools outputs +authors: + - '@drpatelh' diff --git a/subworkflows/nf-core/align_bowtie2/nextflow.config b/subworkflows/nf-core/align_bowtie2/nextflow.config new file mode 100644 index 00000000..89994865 --- /dev/null +++ b/subworkflows/nf-core/align_bowtie2/nextflow.config @@ -0,0 +1,2 @@ +params.align_options = [:] +params.samtools_options = [:] diff --git a/subworkflows/nf-core/bam_sort_samtools/main.nf b/subworkflows/nf-core/bam_sort_samtools/main.nf new file mode 100644 index 00000000..c9be6a56 --- /dev/null +++ b/subworkflows/nf-core/bam_sort_samtools/main.nf @@ -0,0 +1,53 @@ +// +// Sort, index BAM file and run samtools stats, flagstat and idxstats +// + +params.sort_options = [:] +params.index_options = [:] +params.stats_options = [:] + +include { SAMTOOLS_SORT } from '../../../modules/samtools/sort/main' addParams( options: params.sort_options ) +include { SAMTOOLS_INDEX } from '../../../modules/samtools/index/main' addParams( options: params.index_options ) +include { BAM_STATS_SAMTOOLS } from '../bam_stats_samtools/main' addParams( options: params.stats_options ) + +workflow BAM_SORT_SAMTOOLS { + take: + ch_bam // channel: [ val(meta), [ bam ] ] + + main: + + ch_versions = Channel.empty() + + SAMTOOLS_SORT ( ch_bam ) + ch_versions = ch_versions.mix(SAMTOOLS_SORT.out.versions.first()) + + SAMTOOLS_INDEX ( SAMTOOLS_SORT.out.bam ) + ch_versions = ch_versions.mix(SAMTOOLS_INDEX.out.versions.first()) + + SAMTOOLS_SORT.out.bam + .join(SAMTOOLS_INDEX.out.bai, by: [0], remainder: true) + .join(SAMTOOLS_INDEX.out.csi, by: [0], remainder: true) + .map { + meta, bam, bai, csi -> + if (bai) { + [ meta, bam, bai ] + } else { + [ meta, bam, csi ] + } + } + .set { ch_bam_bai } + + BAM_STATS_SAMTOOLS ( ch_bam_bai ) + ch_versions = ch_versions.mix(BAM_STATS_SAMTOOLS.out.versions) + + emit: + bam = SAMTOOLS_SORT.out.bam // channel: [ val(meta), [ bam ] ] + bai = SAMTOOLS_INDEX.out.bai // channel: [ val(meta), [ bai ] ] + csi = SAMTOOLS_INDEX.out.csi // channel: [ val(meta), [ csi ] ] + + stats = BAM_STATS_SAMTOOLS.out.stats // channel: [ val(meta), [ stats ] ] + flagstat = BAM_STATS_SAMTOOLS.out.flagstat // channel: [ val(meta), [ flagstat ] ] + idxstats = BAM_STATS_SAMTOOLS.out.idxstats // channel: [ val(meta), [ idxstats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/bam_sort_samtools/meta.yml b/subworkflows/nf-core/bam_sort_samtools/meta.yml new file mode 100644 index 00000000..a0e3f30b --- /dev/null +++ b/subworkflows/nf-core/bam_sort_samtools/meta.yml @@ -0,0 +1,41 @@ +name: bam_sort_samtools +description: Sort SAM/BAM/CRAM file +keywords: + - sort + - bam + - sam + - cram +modules: + - samtools/sort + - samtools/index + - samtools/stats + - samtools/idxstats + - samtools/flagstat +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: '*.{bam,cram,sam}' +# TODO Update when we decide on a standard for subworkflow docs +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Sorted BAM/CRAM/SAM file + pattern: '*.{bam,cram,sam}' + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' +authors: + - '@drpatelh' + - '@ewels' diff --git a/subworkflows/nf-core/bam_sort_samtools/nextflow.config b/subworkflows/nf-core/bam_sort_samtools/nextflow.config new file mode 100644 index 00000000..2fd55747 --- /dev/null +++ b/subworkflows/nf-core/bam_sort_samtools/nextflow.config @@ -0,0 +1 @@ +params.options = [:] diff --git a/subworkflows/nf-core/bam_stats_samtools/main.nf b/subworkflows/nf-core/bam_stats_samtools/main.nf new file mode 100644 index 00000000..9276232c --- /dev/null +++ b/subworkflows/nf-core/bam_stats_samtools/main.nf @@ -0,0 +1,33 @@ +// +// Run SAMtools stats, flagstat and idxstats +// + +params.options = [:] + +include { SAMTOOLS_STATS } from '../../../modules/samtools/stats/main' addParams( options: params.options ) +include { SAMTOOLS_IDXSTATS } from '../../../modules/samtools/idxstats/main' addParams( options: params.options ) +include { SAMTOOLS_FLAGSTAT } from '../../../modules/samtools/flagstat/main' addParams( options: params.options ) + +workflow BAM_STATS_SAMTOOLS { + take: + ch_bam_bai // channel: [ val(meta), [ bam ], [bai/csi] ] + + main: + ch_versions = Channel.empty() + + SAMTOOLS_STATS ( ch_bam_bai ) + ch_versions = ch_versions.mix(SAMTOOLS_STATS.out.versions.first()) + + SAMTOOLS_FLAGSTAT ( ch_bam_bai ) + ch_versions = ch_versions.mix(SAMTOOLS_FLAGSTAT.out.versions.first()) + + SAMTOOLS_IDXSTATS ( ch_bam_bai ) + ch_versions = ch_versions.mix(SAMTOOLS_IDXSTATS.out.versions.first()) + + emit: + stats = SAMTOOLS_STATS.out.stats // channel: [ val(meta), [ stats ] ] + flagstat = SAMTOOLS_FLAGSTAT.out.flagstat // channel: [ val(meta), [ flagstat ] ] + idxstats = SAMTOOLS_IDXSTATS.out.idxstats // channel: [ val(meta), [ idxstats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/bam_stats_samtools/meta.yml b/subworkflows/nf-core/bam_stats_samtools/meta.yml new file mode 100644 index 00000000..509c5c97 --- /dev/null +++ b/subworkflows/nf-core/bam_stats_samtools/meta.yml @@ -0,0 +1,43 @@ +name: samtools_stats +description: Produces comprehensive statistics from SAM/BAM/CRAM file +keywords: + - statistics + - counts + - bam + - sam + - cram +modules: + - samtools/stats + - samtools/idxstats + - samtools/flagstat +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: '*.{bam,cram,sam}' + - bai: + type: file + description: Index for BAM/CRAM/SAM file + pattern: '*.{bai,crai,sai}' +# TODO Update when we decide on a standard for subworkflow docs +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - stats: + type: file + description: File containing samtools stats output + pattern: '*.{stats}' + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' +authors: + - '@drpatelh' diff --git a/subworkflows/nf-core/bam_stats_samtools/nextflow.config b/subworkflows/nf-core/bam_stats_samtools/nextflow.config new file mode 100644 index 00000000..2fd55747 --- /dev/null +++ b/subworkflows/nf-core/bam_stats_samtools/nextflow.config @@ -0,0 +1 @@ +params.options = [:] diff --git a/tests/config/pytest_subworkflows.yml b/tests/config/pytest_subworkflows.yml new file mode 100644 index 00000000..a8ac84dc --- /dev/null +++ b/tests/config/pytest_subworkflows.yml @@ -0,0 +1,11 @@ +subworkflows/align_bowtie2: + - subworkflows/nf-core/align_bowtie2/** + - tests/subworkflows/nf-core/align_bowtie2/** + +subworkflows/bam_stats_samtools: + - subworkflows/nf-core/bam_stats_samtools/** + - tests/subworkflows/nf-core/bam_stats_samtools/** + +subworkflows/bam_sort_samtools: + - subworkflows/nf-core/bam_sort_samtools/** + - tests/subworkflows/nf-core/bam_sort_samtools/** diff --git a/tests/modules/samtools/sort/main.nf b/tests/modules/samtools/sort/main.nf index 91cd4d01..b76cdb1a 100644 --- a/tests/modules/samtools/sort/main.nf +++ b/tests/modules/samtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_SORT } from '../../../../modules/samtools/sort/main.nf' addParams( options: [:] ) +include { SAMTOOLS_SORT } from '../../../../modules/samtools/sort/main.nf' addParams( options: ['suffix': '.sorted'] ) workflow test_samtools_sort { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index 477574fe..12e6669f 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -4,5 +4,5 @@ - samtools - samtools/sort files: - - path: output/samtools/test.bam - md5sum: bdc2d9e3f579f84df1e242207b627f89 + - path: output/samtools/test.sorted.bam + md5sum: bbb2db225f140e69a4ac577f74ccc90f diff --git a/tests/subworkflows/nf-core/align_bowtie2/main.nf b/tests/subworkflows/nf-core/align_bowtie2/main.nf new file mode 100644 index 00000000..9870242d --- /dev/null +++ b/tests/subworkflows/nf-core/align_bowtie2/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [:] ) +include { ALIGN_BOWTIE2 } from '../../../../subworkflows/nf-core/align_bowtie2/main.nf' addParams( 'samtools_sort_options': ['suffix': '.sorted'] ) + +workflow test_align_bowtie2_single_end { + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BOWTIE2_BUILD ( fasta ) + ALIGN_BOWTIE2 ( input, BOWTIE2_BUILD.out.index ) +} + +workflow test_align_bowtie2_paired_end { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BOWTIE2_BUILD ( fasta ) + ALIGN_BOWTIE2 ( input, BOWTIE2_BUILD.out.index ) +} diff --git 
a/tests/subworkflows/nf-core/align_bowtie2/test.yml b/tests/subworkflows/nf-core/align_bowtie2/test.yml new file mode 100644 index 00000000..51261a14 --- /dev/null +++ b/tests/subworkflows/nf-core/align_bowtie2/test.yml @@ -0,0 +1,81 @@ +- name: align bowtie2 single-end + command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_single_end -c tests/config/nextflow.config + tags: + - subworkflows/align_bowtie2 + - subworkflows/bam_sort_samtools + - subworkflows/bam_stats_samtools + # Modules + - bowtie2 + - bowtie2/align + - samtools + - samtools/index + - samtools/sort + - samtools/stats + - samtools/idxstats + - samtools/flagstat + files: + - path: ./output/bowtie2/test.bam + - path: ./output/bowtie2/test.bowtie2.log + - path: ./output/index/bowtie2/genome.1.bt2 + md5sum: cbe3d0bbea55bc57c99b4bfa25b5fbdf + - path: ./output/index/bowtie2/genome.2.bt2 + md5sum: 47b153cd1319abc88dda532462651fcf + - path: ./output/index/bowtie2/genome.3.bt2 + md5sum: 4ed93abba181d8dfab2e303e33114777 + - path: ./output/index/bowtie2/genome.4.bt2 + md5sum: c25be5f8b0378abf7a58c8a880b87626 + - path: ./output/index/bowtie2/genome.rev.1.bt2 + md5sum: 52be6950579598a990570fbcf5372184 + - path: ./output/index/bowtie2/genome.rev.2.bt2 + md5sum: e3b4ef343dea4dd571642010a7d09597 + # samtools sort + - path: ./output/samtools/test.sorted.bam + - path: ./output/samtools/test.sorted.bam.bai + # samtools stats + - path: ./output/samtools/test.sorted.bam.flagstat + md5sum: e9ce9093133116bc54fd335cfe698372 + - path: ./output/samtools/test.sorted.bam.idxstats + md5sum: e16eb632f7f462514b0873c7ac8ac905 + - path: ./output/samtools/test.sorted.bam.stats + md5sum: 2d837cd72432cd856fca70d33f02ffb5 + +- name: align bowtie2 paired-end + command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_paired_end -c tests/config/nextflow.config + tags: + - subworkflows/align_bowtie2 + - subworkflows/bam_sort_samtools + - subworkflows/bam_stats_samtools + # Modules + - bowtie2 + - bowtie2/align + - samtools + - samtools/index + - samtools/sort + - samtools/stats + - samtools/idxstats + - samtools/flagstat + files: + - path: ./output/bowtie2/test.bam + - path: ./output/bowtie2/test.bowtie2.log + - path: ./output/index/bowtie2/genome.1.bt2 + md5sum: cbe3d0bbea55bc57c99b4bfa25b5fbdf + - path: ./output/index/bowtie2/genome.2.bt2 + md5sum: 47b153cd1319abc88dda532462651fcf + - path: ./output/index/bowtie2/genome.3.bt2 + md5sum: 4ed93abba181d8dfab2e303e33114777 + - path: ./output/index/bowtie2/genome.4.bt2 + md5sum: c25be5f8b0378abf7a58c8a880b87626 + - path: ./output/index/bowtie2/genome.rev.1.bt2 + md5sum: 52be6950579598a990570fbcf5372184 + - path: ./output/index/bowtie2/genome.rev.2.bt2 + md5sum: e3b4ef343dea4dd571642010a7d09597 + # samtools sort + - path: ./output/samtools/test.sorted.bam + - path: ./output/samtools/test.sorted.bam.bai + # samtools stats + - path: ./output/samtools/test.sorted.bam.flagstat + md5sum: 49f3d51a8804ce58fe9cecd2549d279b + - path: ./output/samtools/test.sorted.bam.idxstats + md5sum: 29ff2fa56d35b2a47625b8f517f1a947 + - path: ./output/samtools/test.sorted.bam.stats + md5sum: 98aa88a39d26244c89bd4e577953fb48 diff --git a/tests/subworkflows/nf-core/bam_sort_samtools/main.nf b/tests/subworkflows/nf-core/bam_sort_samtools/main.nf new file mode 100644 index 00000000..0dae6b2b --- /dev/null +++ b/tests/subworkflows/nf-core/bam_sort_samtools/main.nf @@ -0,0 +1,21 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAM_SORT_SAMTOOLS } from 
'../../../../subworkflows/nf-core/bam_sort_samtools/main' addParams( sort_options: ['suffix': '.sorted'] ) + +workflow test_bam_sort_samtools_single_end { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + ] + + BAM_SORT_SAMTOOLS ( input ) +} + +workflow test_bam_sort_samtools_paired_end { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + BAM_SORT_SAMTOOLS ( input ) +} diff --git a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml new file mode 100644 index 00000000..e2fc27d8 --- /dev/null +++ b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml @@ -0,0 +1,47 @@ +- name: bam sort samtools single-end + command: nextflow run ./tests/subworkflows/nf-core/bam_sort_samtools -entry test_bam_sort_samtools_single_end -c tests/config/nextflow.config + tags: + - subworkflows/bam_sort_samtools + - subworkflows/bam_stats_samtools + # Modules + - samtools + - samtools/index + - samtools/sort + - samtools/stats + - samtools/idxstats + - samtools/flagstat + files: + - path: ./output/samtools/test.sorted.bam + md5sum: e4c77897d6824ce4df486d1b100618af + - path: ./output/samtools/test.sorted.bam.bai + md5sum: a70940ce9ba2e700ec2984e0a6526099 + # samtools stats + - path: ./output/samtools/test.sorted.bam.flagstat + md5sum: 2191911d72575a2358b08b1df64ccb53 + - path: ./output/samtools/test.sorted.bam.idxstats + md5sum: 613e048487662c694aa4a2f73ca96a20 + - path: ./output/samtools/test.sorted.bam.stats + +- name: bam sort samtools paired-end + command: nextflow run ./tests/subworkflows/nf-core/bam_sort_samtools -entry test_bam_sort_samtools_paired_end -c tests/config/nextflow.config + tags: + - subworkflows/bam_sort_samtools + - subworkflows/bam_stats_samtools + # Modules + - samtools + - samtools/index + - samtools/sort + - samtools/stats + - samtools/idxstats + - samtools/flagstat + files: + - path: ./output/samtools/test.sorted.bam + md5sum: bbb2db225f140e69a4ac577f74ccc90f + - path: ./output/samtools/test.sorted.bam.bai + md5sum: 20c91e3a0fd4661d7cb967f40d2486ba + # samtools stats + - path: ./output/samtools/test.sorted.bam.flagstat + md5sum: 4f7ffd1e6a5e85524d443209ac97d783 + - path: ./output/samtools/test.sorted.bam.idxstats + md5sum: df60a8c8d6621100d05178c93fb053a2 + - path: ./output/samtools/test.sorted.bam.stats diff --git a/tests/subworkflows/nf-core/bam_stats_samtools/main.nf b/tests/subworkflows/nf-core/bam_stats_samtools/main.nf new file mode 100644 index 00000000..a390c3eb --- /dev/null +++ b/tests/subworkflows/nf-core/bam_stats_samtools/main.nf @@ -0,0 +1,23 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAM_STATS_SAMTOOLS } from '../../../../subworkflows/nf-core/bam_stats_samtools/main' addParams( options: [:] ) + +workflow test_bam_stats_samtools_single_end { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam_bai'], checkIfExists: true) + ] + + BAM_STATS_SAMTOOLS ( input ) +} + +workflow test_bam_stats_samtools_paired_end { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) + ] + + BAM_STATS_SAMTOOLS ( input ) +} diff --git a/tests/subworkflows/nf-core/bam_stats_samtools/test.yml b/tests/subworkflows/nf-core/bam_stats_samtools/test.yml new file mode 100644 index 00000000..d93c95a5 --- /dev/null +++ b/tests/subworkflows/nf-core/bam_stats_samtools/test.yml @@ -0,0 +1,31 @@ +- name: bam stats samtools single-end + command: nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools_single_end -c tests/config/nextflow.config + tags: + - subworkflows/bam_stats_samtools + # Modules + - samtools + - samtools/stats + - samtools/idxstats + - samtools/flagstat + files: + - path: ./output/samtools/test.single_end.sorted.bam.flagstat + md5sum: 2191911d72575a2358b08b1df64ccb53 + - path: ./output/samtools/test.single_end.sorted.bam.idxstats + md5sum: 613e048487662c694aa4a2f73ca96a20 + - path: ./output/samtools/test.single_end.sorted.bam.stats + +- name: bam stats samtools paired-end + command: nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools_paired_end -c tests/config/nextflow.config + tags: + - subworkflows/bam_stats_samtools + # Modules + - samtools + - samtools/stats + - samtools/idxstats + - samtools/flagstat + files: + - path: ./output/samtools/test.paired_end.sorted.bam.flagstat + md5sum: 4f7ffd1e6a5e85524d443209ac97d783 + - path: ./output/samtools/test.paired_end.sorted.bam.idxstats + md5sum: df60a8c8d6621100d05178c93fb053a2 + - path: ./output/samtools/test.paired_end.sorted.bam.stats diff --git a/tests/test_versions_yml.py b/tests/test_versions_yml.py index 759fc4d5..2f78ab2e 100644 --- a/tests/test_versions_yml.py +++ b/tests/test_versions_yml.py @@ -11,7 +11,7 @@ def _get_workflow_names(): To do so, recursively finds all test.yml files and parses their content. """ here = Path(__file__).parent.resolve() - pytest_workflow_files = here.glob("**/test.yml") + pytest_workflow_files = here.glob("modules/**/test.yml") for f in pytest_workflow_files: # test_config = yaml.safe_load(f.read_text()) test_config = yaml.load(f.read_text(), Loader=yaml.BaseLoader) From 07c0830057cc655de113d84499c7c1499460bb55 Mon Sep 17 00:00:00 2001 From: "Moritz E. 
Beber" Date: Mon, 11 Oct 2021 23:30:41 +0200 Subject: [PATCH 135/314] Add a module for sra-tools prefetch (#714) * chore: apply module template * refactor: add NCBI settings to options * docs: complete meta information * feat: add prefetch process * fix: correct bash commands * tests: define the right tests * style: move option definition to satisfy linting * fix: extract version correctly * fix: correct newline issues * refactor: address review comments * Apply suggestions from code review * chore: add retrying via nf-core label * refactor: validate download thoroughly * refactor: remove vdb-config input Co-authored-by: Harshil Patel --- modules/sratools/prefetch/functions.nf | 78 ++++++++++++++++++++++++ modules/sratools/prefetch/main.nf | 50 +++++++++++++++ modules/sratools/prefetch/meta.yml | 43 +++++++++++++ tests/config/pytest_modules.yml | 12 ++-- tests/modules/sratools/prefetch/main.nf | 15 +++++ tests/modules/sratools/prefetch/test.yml | 8 +++ 6 files changed, 202 insertions(+), 4 deletions(-) create mode 100644 modules/sratools/prefetch/functions.nf create mode 100644 modules/sratools/prefetch/main.nf create mode 100644 modules/sratools/prefetch/meta.yml create mode 100644 tests/modules/sratools/prefetch/main.nf create mode 100644 tests/modules/sratools/prefetch/test.yml diff --git a/modules/sratools/prefetch/functions.nf b/modules/sratools/prefetch/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/sratools/prefetch/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/sratools/prefetch/main.nf b/modules/sratools/prefetch/main.nf new file mode 100644 index 00000000..207d1e10 --- /dev/null +++ b/modules/sratools/prefetch/main.nf @@ -0,0 +1,50 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SRATOOLS_PREFETCH { + tag "$id" + label 'process_low' + label 'error_retry' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0' : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container 'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5262h314213e_0' + } else { + container 'quay.io/biocontainers/sra-tools:2.11.0--pl5262h314213e_0' + } + + input: + tuple val(meta), val(id) + + output: + tuple val(meta), path("$id"), emit: sra + path "versions.yml" , emit: versions + + script: + def config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" + """ + eval "\$(vdb-config -o n NCBI_SETTINGS | sed 's/[" ]//g')" + if [[ ! -f "\${NCBI_SETTINGS}" ]]; then + mkdir -p "\$(dirname "\${NCBI_SETTINGS}")" + printf '${config}' > "\${NCBI_SETTINGS}" + fi + + prefetch \\ + $options.args \\ + --progress \\ + $id + + vdb-validate $id + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(prefetch --version 2>&1 | grep -Eo '[0-9.]+') + END_VERSIONS + """ +} diff --git a/modules/sratools/prefetch/meta.yml b/modules/sratools/prefetch/meta.yml new file mode 100644 index 00000000..ab0a5ce5 --- /dev/null +++ b/modules/sratools/prefetch/meta.yml @@ -0,0 +1,43 @@ +name: sratools_prefetch +description: Download sequencing data from the NCBI Sequence Read Archive (SRA). +keywords: + - sequencing + - fastq + - prefetch +tools: + - sratools: + description: SRA Toolkit and SDK from NCBI + homepage: https://github.com/ncbi/sra-tools + documentation: https://github.com/ncbi/sra-tools/wiki + tool_dev_url: https://github.com/ncbi/sra-tools + licence: ['Public Domain'] + +input: + - meta: + type: map + description: > + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - id: + type: val + description: > + A string denoting an SRA id. + +output: + - meta: + type: map + description: > + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - sra: + type: directory + description: > + Directory containing the ETL data for the given SRA id. 
+ pattern: "*/*.sra" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@Midnighter" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 45a4d62c..34c37b0b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -529,6 +529,10 @@ iqtree: - modules/iqtree/** - tests/modules/iqtree/** +ismapper: + - modules/ismapper/** + - tests/modules/ismapper/** + isoseq3/cluster: - modules/isoseq3/cluster/** - tests/modules/isoseq3/cluster/** @@ -537,10 +541,6 @@ isoseq3/refine: - modules/isoseq3/refine/** - tests/modules/isoseq3/refine/** -ismapper: - - modules/ismapper/** - - tests/modules/ismapper/** - ivar/consensus: - modules/ivar/consensus/** - tests/modules/ivar/consensus/** @@ -979,6 +979,10 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** +sratools/prefetch: + - modules/sratools/prefetch/** + - tests/modules/sratools/prefetch/** + staphopiasccmec: - modules/staphopiasccmec/** - tests/modules/staphopiasccmec/** diff --git a/tests/modules/sratools/prefetch/main.nf b/tests/modules/sratools/prefetch/main.nf new file mode 100644 index 00000000..99439a7f --- /dev/null +++ b/tests/modules/sratools/prefetch/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SRATOOLS_PREFETCH } from '../../../../modules/sratools/prefetch/main.nf' addParams( options: [:] ) + +workflow test_sratools_prefetch { + + input = [ + [ id:'test', single_end:false ], // meta map + 'ERR2815334' + ] + + SRATOOLS_PREFETCH ( input ) +} diff --git a/tests/modules/sratools/prefetch/test.yml b/tests/modules/sratools/prefetch/test.yml new file mode 100644 index 00000000..c23db12a --- /dev/null +++ b/tests/modules/sratools/prefetch/test.yml @@ -0,0 +1,8 @@ +- name: sratools prefetch test_sratools_prefetch + command: nextflow run tests/modules/sratools/prefetch -entry test_sratools_prefetch -c tests/config/nextflow.config + tags: + - sratools/prefetch + - sratools + files: + - path: output/sratools/ERR2815334/ERR2815334.sra + md5sum: 9a98c7f6f4774b7ef94aa915b92a54ea From d0df4b03e35676d5a303027d813c94013c7e0286 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Tue, 12 Oct 2021 07:14:20 +0000 Subject: [PATCH 136/314] ci: Duplicate tests for subworkflows (#815) Causing an issue with only running the subworkflow tests and now the modules. Just for now, we can come back to this. 
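Concretely, the change below splits the previous single change-detection/test job pair into one pair for modules and one for subworkflows, so each category gets its own tag matrix. Both pairs follow roughly the same pattern, sketched here for orientation only — job names and the filter file are illustrative placeholders, not the exact jobs in the diff: dorny/paths-filter compares the changed paths against a filters YAML and exposes the matched filter keys as a JSON list, which the downstream job expands into a pytest-workflow tag matrix via fromJSON.

    jobs:
      module_changes:
        runs-on: ubuntu-latest
        outputs:
          # JSON list of filter keys (pytest tags) whose watched paths changed
          modules: ${{ steps.filter.outputs.changes }}
        steps:
          - uses: actions/checkout@v2
          - uses: dorny/paths-filter@v2
            id: filter
            with:
              filters: "tests/config/pytest_modules.yml"

      module_test:
        needs: module_changes
        # skip the whole job when nothing relevant changed
        if: needs.module_changes.outputs.modules != '[]'
        runs-on: ubuntu-20.04
        strategy:
          fail-fast: false
          matrix:
            tags: ${{ fromJson(needs.module_changes.outputs.modules) }}
        steps:
          - uses: actions/checkout@v2
          # placeholder for the real pytest-workflow invocation
          - run: echo "would run pytest --tag ${{ matrix.tags }}"

Duplicating this pattern per category keeps the module and subworkflow tag lists independent, at the cost of two near-identical job definitions — the temporary trade-off accepted here.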
--- .github/workflows/pytest-workflow.yml | 125 +++++++++++++++++++++++--- 1 file changed, 113 insertions(+), 12 deletions(-) diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 6f395409..5ece35b8 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -6,14 +6,12 @@ on: branches: [master] jobs: - changes: - name: Check for changes + ########### + # Modules # + ########### + module_changes: + name: Check for changes in the modules runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - filter: - ["tests/config/pytest_modules.yml", "tests/config/pytest_subworkflows.yml"] outputs: # Expose matched filters as job 'modules' output variable modules: ${{ steps.filter.outputs.changes }} @@ -23,19 +21,122 @@ jobs: - uses: dorny/paths-filter@v2 id: filter with: - filters: ${{ matrix.filter }} + filters: "tests/config/pytest_modules.yml" - test: + module_test: runs-on: ubuntu-20.04 name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} - needs: changes - if: needs.changes.outputs.modules != '[]' + needs: module_changes + if: needs.module_changes.outputs.modules != '[]' strategy: fail-fast: false matrix: nxf_version: ["21.04.0"] - tags: ["${{ fromJson(needs.changes.outputs.modules) }}"] + tags: ["${{ fromJson(needs.modules_changes.outputs.modules) }}"] + profile: ["docker", "singularity", "conda"] + env: + NXF_ANSI_LOG: false + steps: + - uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: "3.x" + + - uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: "3.x" + + - name: Install Python dependencies + run: python -m pip install --upgrade pip pytest-workflow + + - uses: actions/cache@v2 + with: + path: /usr/local/bin/nextflow + key: ${{ runner.os }}-nextflow-${{ matrix.nxf_version }} + restore-keys: | + ${{ runner.os }}-nextflow- + + - name: Install Nextflow + env: + NXF_VER: ${{ matrix.nxf_version }} + CAPSULE_LOG: none + run: | + wget -qO- get.nextflow.io | bash + sudo mv nextflow /usr/local/bin/ + + - name: Set up Singularity + if: matrix.profile == 'singularity' + uses: eWaterCycle/setup-singularity@v5 + with: + singularity-version: 3.7.1 + + - name: Setup miniconda + if: matrix.profile == 'conda' + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + channels: conda-forge,bioconda,defaults + python-version: ${{ matrix.python-version }} + + - name: Conda clean + if: matrix.profile == 'conda' + run: conda clean -a + + # Test the module + - name: Run pytest-workflow + # only use one thread for pytest-workflow to avoid race condition on conda cache. 
+ run: NF_CORE_MODULES_TEST=1 TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof + + - name: Upload logs on failure + if: failure() + uses: actions/upload-artifact@v2 + with: + name: logs-${{ matrix.profile }}-${{ matrix.nxf_version }} + path: | + /home/runner/pytest_workflow_*/*/.nextflow.log + /home/runner/pytest_workflow_*/*/log.out + /home/runner/pytest_workflow_*/*/log.err + /home/runner/pytest_workflow_*/*/work + + ################ + # Subworkflows # + ################ + subworkflow_changes: + name: Check for changes in the subworkflows + runs-on: ubuntu-latest + outputs: + # Expose matched filters as job 'subworkflows' output variable + subworkflows: ${{ steps.filter.outputs.changes }} + steps: + - uses: actions/checkout@v2 + + - uses: dorny/paths-filter@v2 + id: filter + with: + filters: "tests/config/pytest_subworkflows.yml" + + subworkflow_test: + runs-on: ubuntu-20.04 + + name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} + needs: subworkflow_changes + if: needs.subworkflow_changes.outputs.subworkflows != '[]' + strategy: + fail-fast: false + matrix: + nxf_version: ["21.04.0"] + tags: ["${{ fromJson(needs.subworkflow_changes.outputs.subworkflows) }}"] profile: ["docker", "singularity", "conda"] env: NXF_ANSI_LOG: false From 194e598a34bf8fc651be61f354c02e286d7f03a7 Mon Sep 17 00:00:00 2001 From: "Moritz E. Beber" Date: Tue, 12 Oct 2021 12:58:19 +0200 Subject: [PATCH 137/314] Fix workflow (#817) * tests: fix tags definition * fix: correct typo --- .github/workflows/pytest-workflow.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 5ece35b8..0b509527 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -33,7 +33,7 @@ jobs: fail-fast: false matrix: nxf_version: ["21.04.0"] - tags: ["${{ fromJson(needs.modules_changes.outputs.modules) }}"] + tags: ${{ fromJson(needs.module_changes.outputs.modules) }} profile: ["docker", "singularity", "conda"] env: NXF_ANSI_LOG: false @@ -136,7 +136,7 @@ jobs: fail-fast: false matrix: nxf_version: ["21.04.0"] - tags: ["${{ fromJson(needs.subworkflow_changes.outputs.subworkflows) }}"] + tags: ${{ fromJson(needs.subworkflow_changes.outputs.subworkflows) }} profile: ["docker", "singularity", "conda"] env: NXF_ANSI_LOG: false From 3a4935d21ba2301fb9f322a0497f663ba447c446 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Tue, 12 Oct 2021 13:43:08 +0100 Subject: [PATCH 138/314] New module: `bamtools/split` (#798) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: Ignore test data * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: Ignore test data * 👌 IMPROVE: Update to 
last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update with new versions.yml file * 🐛 FIX: Update meta.yml + correct typos * Update modules/bamtools/split/meta.yml Add bam, split, chunk tags Co-authored-by: James A. Fellows Yates * 🐛 FIX: Correct meta.yml Co-authored-by: James A. Fellows Yates --- modules/bamtools/split/functions.nf | 78 +++++++++++++++++++++++++++ modules/bamtools/split/main.nf | 41 ++++++++++++++ modules/bamtools/split/meta.yml | 45 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bamtools/split/main.nf | 14 +++++ tests/modules/bamtools/split/test.yml | 10 ++++ 6 files changed, 192 insertions(+) create mode 100644 modules/bamtools/split/functions.nf create mode 100644 modules/bamtools/split/main.nf create mode 100644 modules/bamtools/split/meta.yml create mode 100644 tests/modules/bamtools/split/main.nf create mode 100644 tests/modules/bamtools/split/test.yml diff --git a/modules/bamtools/split/functions.nf b/modules/bamtools/split/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/bamtools/split/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf new file mode 100644 index 00000000..506a957c --- /dev/null +++ b/modules/bamtools/split/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BAMTOOLS_SPLIT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bamtools=2.5.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bamtools:2.5.1--h9a82719_9" + } else { + container "quay.io/biocontainers/bamtools:2.5.1--h9a82719_9" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + bamtools \\ + split \\ + -in $bam \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + bamtools: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) + END_VERSIONS + """ +} diff --git a/modules/bamtools/split/meta.yml b/modules/bamtools/split/meta.yml new file mode 100644 index 00000000..b9b52f59 --- /dev/null +++ b/modules/bamtools/split/meta.yml @@ -0,0 +1,45 @@ +name: bamtools_split +description: BamTools provides both a programmer's API and an end-user's toolkit for handling BAM files. +keywords: + - bamtools + - bamtools/split + - bam + - split + - chunk +tools: + - bamtools: + description: C++ API & command-line toolkit for working with BAM data + homepage: http://github.com/pezmaster31/bamtools + documentation: https://github.com/pezmaster31/bamtools/wiki + tool_dev_url: http://github.com/pezmaster31/bamtools + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: A BAM file to split + pattern: "*.bam" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Several Bam files + pattern: "*.bam" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 34c37b0b..cd51b86b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -34,6 +34,10 @@ bamaligncleaner: - modules/bamaligncleaner/** - tests/modules/bamaligncleaner/** +bamtools/split: + - modules/bamtools/split/** + - tests/modules/bamtools/split/** + bandage/image: - modules/bandage/image/** - tests/modules/bandage/image/** diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf new file mode 100644 index 00000000..5538c86f --- /dev/null +++ b/tests/modules/bamtools/split/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAMTOOLS_SPLIT } from '../../../../modules/bamtools/split/main.nf' addParams( options: [args:"-reference"] ) + +workflow test_bamtools_split { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + + BAMTOOLS_SPLIT ( input ) +} diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml new file mode 100644 index 00000000..f92f9345 --- /dev/null +++ b/tests/modules/bamtools/split/test.yml @@ -0,0 +1,10 @@ +- name: bamtools split test_bamtools_split + command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split -c tests/config/nextflow.config + tags: + - bamtools + - bamtools/split + files: + - path: output/bamtools/test.paired_end.sorted.REF_chr22:16570000-16610000.bam + md5sum: 256535b9a3ab5864be0f7dea2218d159 + - path: output/bamtools/test.paired_end.sorted.REF_unmapped.bam + md5sum: 568e058d871d8bc319330360bcae4e43 From de997825de788fe2210db16d9426f10342a1ba1d Mon Sep 17 00:00:00 2001 From: "Moritz E. 
Beber" Date: Tue, 12 Oct 2021 15:20:58 +0200 Subject: [PATCH 139/314] Add a new module for fasterq-dump (#807) * chore: use template to create fasterq module * feat: add fasterq-dump process module * docs: provide input and output descriptions * docs: add comment on `--temp` * fix: use correct variable * tests: define test output * refactor: address review comments * refactor: remove vdb-config input * chore: add new test data to config * tests: define single-end and paired-end cases * refactor: choose specific output * tests: do not expect single FASTQ for paired-end * feat: add compression * Apply suggestions from code review Co-authored-by: Harshil Patel * tests: revert the test data name * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/sratools/fasterqdump/functions.nf | 78 +++++++++++++++++++++ modules/sratools/fasterqdump/main.nf | 58 +++++++++++++++ modules/sratools/fasterqdump/meta.yml | 42 +++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 7 +- tests/modules/sratools/fasterqdump/main.nf | 28 ++++++++ tests/modules/sratools/fasterqdump/test.yml | 23 ++++++ 7 files changed, 238 insertions(+), 2 deletions(-) create mode 100644 modules/sratools/fasterqdump/functions.nf create mode 100644 modules/sratools/fasterqdump/main.nf create mode 100644 modules/sratools/fasterqdump/meta.yml create mode 100644 tests/modules/sratools/fasterqdump/main.nf create mode 100644 tests/modules/sratools/fasterqdump/test.yml diff --git a/modules/sratools/fasterqdump/functions.nf b/modules/sratools/fasterqdump/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/sratools/fasterqdump/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/sratools/fasterqdump/main.nf b/modules/sratools/fasterqdump/main.nf new file mode 100644 index 00000000..08ef9045 --- /dev/null +++ b/modules/sratools/fasterqdump/main.nf @@ -0,0 +1,58 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SRATOOLS_FASTERQDUMP { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0 conda-forge::pigz=2.6' : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' + } else { + container 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' + } + + input: + tuple val(meta), path(sra) + + output: + tuple val(meta), path(output), emit: reads + path "versions.yml" , emit: versions + + script: + def config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" + // Paired-end data extracted by fasterq-dump (--split-3 the default) always creates + // *_1.fastq *_2.fastq files but sometimes also an additional *.fastq file + // for unpaired reads which we ignore here. + output = meta.single_end ? '*.fastq.gz' : '*_{1,2}.fastq.gz' + """ + eval "\$(vdb-config -o n NCBI_SETTINGS | sed 's/[" ]//g')" + if [[ ! -f "\${NCBI_SETTINGS}" ]]; then + mkdir -p "\$(dirname "\${NCBI_SETTINGS}")" + printf '${config}' > "\${NCBI_SETTINGS}" + fi + + fasterq-dump \\ + ${options.args} \\ + --threads $task.cpus \\ + ${sra.name} + + pigz \\ + ${options.args2} \\ + --no-name \\ + --processes $task.cpus \\ + *.fastq + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(fasterq-dump --version 2>&1 | grep -Eo '[0-9.]+') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ +} diff --git a/modules/sratools/fasterqdump/meta.yml b/modules/sratools/fasterqdump/meta.yml new file mode 100644 index 00000000..ac61e71f --- /dev/null +++ b/modules/sratools/fasterqdump/meta.yml @@ -0,0 +1,42 @@ +name: sratools_fasterqdump +description: Extract sequencing reads in FASTQ format from a given NCBI Sequence Read Archive (SRA). 
+keywords: + - sequencing + - FASTQ + - dump +tools: + - sratools: + description: SRA Toolkit and SDK from NCBI + homepage: https://github.com/ncbi/sra-tools + documentation: https://github.com/ncbi/sra-tools/wiki + tool_dev_url: https://github.com/ncbi/sra-tools + licence: ['Public Domain'] + +input: + - meta: + type: map + description: > + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - sra: + type: directory + description: Directory containing ETL data for the given SRA. + pattern: "*/*.sra" + +output: + - meta: + type: map + description: > + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - reads: + type: file + description: Extracted FASTQ file or files if the sequencing reads are paired-end. + pattern: "*.fastq.gz" + +authors: + - "@Midnighter" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index cd51b86b..e1fba94c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -987,6 +987,10 @@ sratools/prefetch: - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** +sratools/fasterqdump: + - modules/sratools/fasterqdump/** + - tests/modules/sratools/fasterqdump/** + staphopiasccmec: - modules/staphopiasccmec/** - tests/modules/staphopiasccmec/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 30e6f1ea..b4443e9f 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -80,6 +80,9 @@ params { assembly_gfa = "${test_data_dir}/genomics/sarscov2/illumina/gfa/assembly.gfa" test_single_end_bam_readlist_txt = "${test_data_dir}/genomics/sarscov2/illumina/picard/test.single_end.bam.readlist.txt" + + SRR13255544_tar_gz = "${test_data_dir}/genomics/sarscov2/illumina/sra/SRR13255544.tar.gz" + SRR11140744_tar_gz = "${test_data_dir}/genomics/sarscov2/illumina/sra/SRR11140744.tar.gz" } 'nanopore' { test_sorted_bam = "${test_data_dir}/genomics/sarscov2/nanopore/bam/test.sorted.bam" @@ -155,11 +158,11 @@ params { test2_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.baserecalibrator.table" test_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test.pileups.table" test2_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.pileups.table" - + test_test2_paired_mutect2_calls_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz" test_test2_paired_mutect2_calls_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.tbi" test_test2_paired_mutect2_calls_vcf_gz_stats = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.stats" - test_test2_paired_mutect2_calls_f1r2_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.f1r2.tar.gz" + test_test2_paired_mutect2_calls_f1r2_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.f1r2.tar.gz" test_test2_paired_mutect2_calls_artifact_prior_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_test2_paired_mutect2_calls.artifact-prior.tar.gz" test_test2_paired_segmentation_table = 
"${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_test2_paired.segmentation.table" test_test2_paired_contamination_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_test2_paired.contamination.table" diff --git a/tests/modules/sratools/fasterqdump/main.nf b/tests/modules/sratools/fasterqdump/main.nf new file mode 100644 index 00000000..1a0e0c7a --- /dev/null +++ b/tests/modules/sratools/fasterqdump/main.nf @@ -0,0 +1,28 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR } from '../../../../modules/untar/main.nf' +include { SRATOOLS_FASTERQDUMP } from '../../../../modules/sratools/fasterqdump/main.nf' addParams( options: [:] ) + +workflow test_sratools_fasterqdump_single_end { + + archive = file(params.test_data['sarscov2']['illumina']['SRR13255544_tar_gz'], checkIfExists: true) + UNTAR ( archive ) + + def input = Channel.of([ id:'test_single_end', single_end:true ]) + .combine(UNTAR.out.untar) + + SRATOOLS_FASTERQDUMP ( input ) +} + +workflow test_sratools_fasterqdump_paired_end { + + archive = file(params.test_data['sarscov2']['illumina']['SRR11140744_tar_gz'], checkIfExists: true) + UNTAR ( archive ) + + def input = Channel.of([ id:'test_paired_end', single_end:false ]) + .combine(UNTAR.out.untar) + + SRATOOLS_FASTERQDUMP ( input ) +} diff --git a/tests/modules/sratools/fasterqdump/test.yml b/tests/modules/sratools/fasterqdump/test.yml new file mode 100644 index 00000000..94da4ed8 --- /dev/null +++ b/tests/modules/sratools/fasterqdump/test.yml @@ -0,0 +1,23 @@ +- name: sratools fasterqdump test_sratools_fasterqdump_single_end + command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c tests/config/nextflow.config + tags: + - sratools + - sratools/fasterqdump + files: + - path: output/sratools/SRR13255544.fastq.gz + md5sum: 1054c7b71884acdb5eed8a378f18be82 + - path: output/untar/SRR13255544/SRR13255544.sra + md5sum: 466d05dafb2eec672150754168010b4d + +- name: sratools fasterqdump test_sratools_fasterqdump_paired_end + command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_paired_end -c tests/config/nextflow.config + tags: + - sratools + - sratools/fasterqdump + files: + - path: output/sratools/SRR11140744_1.fastq.gz + md5sum: 193809c784a4ea132ab2a253fa4f55b6 + - path: output/sratools/SRR11140744_2.fastq.gz + md5sum: 3e3b3af3413f50a1685fd7b3f1456d4e + - path: output/untar/SRR11140744/SRR11140744.sra + md5sum: 065666caf5b2d5dfb0cb25d5f3abe659 From 359f721cc957e484b62b6ce11e91456f4bbce084 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Tue, 12 Oct 2021 17:06:06 +0200 Subject: [PATCH 140/314] Add MEGAHIT (#810) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry --- modules/megahit/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/megahit/main.nf | 76 ++++++++++++++++++++++++++++++++ modules/megahit/meta.yml | 62 ++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/megahit/main.nf | 28 ++++++++++++ tests/modules/megahit/test.yml | 71 ++++++++++++++++++++++++++++++ 6 files changed, 319 insertions(+) create mode 100644 modules/megahit/functions.nf create mode 100644 modules/megahit/main.nf create mode 100644 modules/megahit/meta.yml create mode 100644 tests/modules/megahit/main.nf create mode 100644 tests/modules/megahit/test.yml diff --git a/modules/megahit/functions.nf b/modules/megahit/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/megahit/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/megahit/main.nf b/modules/megahit/main.nf new file mode 100644 index 00000000..8c8a5555 --- /dev/null +++ b/modules/megahit/main.nf @@ -0,0 +1,76 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MEGAHIT { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::megahit=1.2.9 conda-forge::pigz=2.6" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0" + } else { + container "quay.io/biocontainers/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("megahit_out/*.contigs.fa.gz") , emit: contigs + tuple val(meta), path("megahit_out/intermediate_contigs/k*.contigs.fa.gz") , emit: k_contigs + tuple val(meta), path("megahit_out/intermediate_contigs/k*.addi.fa.gz") , emit: addi_contigs + tuple val(meta), path("megahit_out/intermediate_contigs/k*.local.fa.gz") , emit: local_contigs + tuple val(meta), path("megahit_out/intermediate_contigs/k*.final.contigs.fa.gz"), emit: kfinal_contigs + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + if (meta.single_end) { + """ + megahit \\ + -r ${reads} \\ + -t $task.cpus \\ + $options.args \\ + --out-prefix $prefix + + pigz \\ + --no-name \\ + -p $task.cpus \\ + $options.args2 \\ + megahit_out/*.fa \\ + megahit_out/intermediate_contigs/*.fa + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') + END_VERSIONS + """ + } else { + """ + megahit \\ + -1 ${reads[0]} \\ + -2 ${reads[1]} \\ + -t $task.cpus \\ + $options.args \\ + --out-prefix $prefix + + pigz \\ + --no-name \\ + -p $task.cpus \\ + $options.args2 \\ + megahit_out/*.fa \\ + megahit_out/intermediate_contigs/*.fa + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') + END_VERSIONS + """ + } +} diff --git a/modules/megahit/meta.yml b/modules/megahit/meta.yml new file mode 100644 index 00000000..e4b2181b --- /dev/null +++ b/modules/megahit/meta.yml @@ -0,0 +1,62 @@ +name: megahit +description: An ultra-fast metagenomic assembler for large and complex metagenomics +keywords: + - megahit + - denovo + - assembly + - debruijn + - metagenomics +tools: + - megahit: + description: "An ultra-fast single-node solution for large and complex metagenomics assembly via succinct de Bruijn graph" + homepage: https://github.com/voutcn/megahit + documentation: https://github.com/voutcn/megahit + tool_dev_url: https://github.com/voutcn/megahit + doi: "10.1093/bioinformatics/btv033" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information and input single, or paired-end FASTA/FASTQ files (optionally decompressed) + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively in gzipped or uncompressed FASTQ or FASTA format. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - contigs: + type: file + description: Final final contigs result of the assembly in FASTA format. 
+ pattern: "*.contigs.fa.gz" + - k_contigs: + type: file + description: Contigs assembled from the de Bruijn graph of order-K + pattern: "k*.contigs.fa.gz" + - addi_contigs: + type: file + description: Contigs assembled after iteratively removing local low coverage unitigs in the de Bruijn graph of order-K + pattern: "k*.addi.fa.gz" + - local_contigs: + type: file + description: Contigs of the locally assembled contigs for k=K + pattern: "k*.local.fa.gz" + - kfinal_contigs: + type: file + description: Stand-alone contigs for k=K; if local assembly is turned on, the file will be empty + pattern: "k*.final.contigs.fa.gz" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index e1fba94c..decce4be 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -650,6 +650,10 @@ mashtree: - modules/mashtree/** - tests/modules/mashtree/** +megahit: + - modules/megahit/** + - tests/modules/megahit/** + metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** diff --git a/tests/modules/megahit/main.nf b/tests/modules/megahit/main.nf new file mode 100644 index 00000000..dcf07cd6 --- /dev/null +++ b/tests/modules/megahit/main.nf @@ -0,0 +1,28 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MEGAHIT } from '../../../modules/megahit/main.nf' addParams( options: [:] ) + +workflow test_megahit { + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + MEGAHIT ( input ) +} + +workflow test_megahit_single { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + + MEGAHIT ( input ) +} diff --git a/tests/modules/megahit/test.yml b/tests/modules/megahit/test.yml new file mode 100644 index 00000000..c390891b --- /dev/null +++ b/tests/modules/megahit/test.yml @@ -0,0 +1,71 @@ +- name: megahit + command: nextflow run ./tests/modules/megahit -entry test_megahit -c tests/config/nextflow.config -process.cpus 1 + tags: + - megahit + files: + - path: output/megahit/megahit_out/test.contigs.fa.gz + md5sum: 8ed114f22130e16df3532d3f6b03e116 + - path: output/megahit/megahit_out/intermediate_contigs/k21.addi.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k21.contigs.fa.gz + md5sum: 4221d45f238045bbdb1eea04e4ce4261 + - path: output/megahit/megahit_out/intermediate_contigs/k21.final.contigs.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k21.local.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k29.addi.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k29.contigs.fa.gz + md5sum: c72aeb242788542af0260098b4d61204 + - path: output/megahit/megahit_out/intermediate_contigs/k29.final.contigs.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k29.local.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k39.addi.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k39.contigs.fa.gz + md5sum: aa188f4c92e69c1a4b396e8f2991236f + - path: 
output/megahit/megahit_out/intermediate_contigs/k39.final.contigs.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k39.local.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + +- name: megahit_single + command: nextflow run ./tests/modules/megahit -entry test_megahit_single -c tests/config/nextflow.config -process.cpus 1 + tags: + - megahit + files: + - path: output/megahit/megahit_out/test.contigs.fa.gz + md5sum: f50352838b778cc67824f631197a8346 + - path: output/megahit/megahit_out/intermediate_contigs/k21.addi.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k21.contigs.fa.gz + md5sum: 61554dc60ba8e95d9c1d9dca8d465bef + - path: output/megahit/megahit_out/intermediate_contigs/k21.final.contigs.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k21.local.fa.gz + md5sum: b916fc620fdf0d23ef33485352c168b3 + - path: output/megahit/megahit_out/intermediate_contigs/k29.addi.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k29.contigs.fa.gz + md5sum: d916bc564854aa0fabaa5234035aa47b + - path: output/megahit/megahit_out/intermediate_contigs/k29.final.contigs.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k29.local.fa.gz + md5sum: cccf44441e65913b02fb64eb0835dcc1 + - path: output/megahit/megahit_out/intermediate_contigs/k39.addi.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k39.contigs.fa.gz + md5sum: 4416a9e846ccbeb06b880ac2fdc02925 + - path: output/megahit/megahit_out/intermediate_contigs/k39.final.contigs.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k39.local.fa.gz + md5sum: 590d0a08285226d24f7f984f7b3b4f65 + - path: output/megahit/megahit_out/intermediate_contigs/k59.addi.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k59.contigs.fa.gz + md5sum: 51ef726b87a53b0cbdde762d7973a8a7 + - path: output/megahit/megahit_out/intermediate_contigs/k59.final.contigs.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/megahit/megahit_out/intermediate_contigs/k59.local.fa.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a From 7b1e84f7be7cd662c0e149f04f49c4cc49288c55 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Wed, 13 Oct 2021 10:40:04 +0200 Subject: [PATCH 141/314] Add bcftools/index (#812) * feat: add bcftools index * Extend tests to also test gen for TBI * Update meta.yml * Update meta.yml --- modules/bcftools/index/functions.nf | 78 +++++++++++++++++++++++++++ modules/bcftools/index/main.nf | 44 +++++++++++++++ modules/bcftools/index/meta.yml | 49 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bcftools/index/main.nf | 23 ++++++++ tests/modules/bcftools/index/test.yml | 17 ++++++ 6 files changed, 215 insertions(+) create mode 100644 modules/bcftools/index/functions.nf create mode 100644 modules/bcftools/index/main.nf create mode 100644 modules/bcftools/index/meta.yml create mode 100644 tests/modules/bcftools/index/main.nf create mode 100644 tests/modules/bcftools/index/test.yml diff --git a/modules/bcftools/index/functions.nf b/modules/bcftools/index/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/bcftools/index/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : ''
+                path_list.add(path)
+            }
+        }
+    }
+    if (ioptions.publish_files instanceof Map) {
+        for (ext in ioptions.publish_files) {
+            if (args.filename.endsWith(ext.key)) {
+                def ext_list = path_list.collect()
+                ext_list.add(ext.value)
+                return "${getPathFromList(ext_list)}/$args.filename"
+            }
+        }
+    } else if (ioptions.publish_files == null) {
+        return "${getPathFromList(path_list)}/$args.filename"
+    }
+}
diff --git a/modules/bcftools/index/main.nf b/modules/bcftools/index/main.nf
new file mode 100644
index 00000000..d67614d8
--- /dev/null
+++ b/modules/bcftools/index/main.nf
@@ -0,0 +1,44 @@
+// Import generic module functions
+include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'
+
+params.options = [:]
+options = initOptions(params.options)
+
+process BCFTOOLS_INDEX {
+    tag "$meta.id"
+    label 'process_low'
+    publishDir "${params.outdir}",
+        mode: params.publish_dir_mode,
+        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }
+
+    conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null)
+    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
+        container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0"
+    } else {
+        container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0"
+    }
+
+    input:
+    tuple val(meta), path(vcf)
+
+    output:
+    tuple val(meta), path("*.csi"), optional:true, emit: csi
+    tuple val(meta), path("*.tbi"), optional:true, emit: tbi
+    path "versions.yml" , emit: version
+
+    script:
+    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
+
+    """
+    bcftools \\
+        index \\
+        $options.args \\
+        --threads $task.cpus \\
+        $vcf
+
+    cat <<-END_VERSIONS > versions.yml
+    ${getProcessName(task.process)}:
+        ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//')
+    END_VERSIONS
+    """
+}
diff --git a/modules/bcftools/index/meta.yml b/modules/bcftools/index/meta.yml
new file mode 100644
index 00000000..6fc7df17
--- /dev/null
+++ b/modules/bcftools/index/meta.yml
@@ -0,0 +1,49 @@
+name: bcftools_index
+description: Index VCF and BCF files
+keywords:
+  - vcf
+  - index
+  - bcftools
+  - csi
+  - tbi
+tools:
+  - bcftools:
+      description: BCFtools is a set of utilities that manipulate variant calls in the Variant Call Format (VCF) and its binary counterpart BCF. All commands work transparently with both VCFs and BCFs, both uncompressed and BGZF-compressed. Most commands accept VCF, bgzipped VCF and BCF with filetype detected automatically even when streaming from a pipe. Indexed VCF and BCF will work in all situations. Un-indexed VCF and BCF and streams will work in most, but not all situations.
+      homepage: https://samtools.github.io/bcftools/
+      documentation: https://samtools.github.io/bcftools/howtos/index.html
+      tool_dev_url: https://github.com/samtools/bcftools
+      doi: "10.1093/gigascience/giab008"
+      licence: ['GPL']
+
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - VCF:
+      type: file
+      description: VCF file (optionally GZIPPED)
+      pattern: "*.{vcf,vcf.gz}"
+
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - version:
+      type: file
+      description: File containing software version
+      pattern: "versions.yml"
+  - csi:
+      type: file
+      description: Default VCF file index file
+      pattern: "*.csi"
+  - tbi:
+      type: file
+      description: Alternative VCF file index file for larger files (activated with -t parameter)
+      pattern: "*.tbi"
+
+authors:
+  - "@jfy133"
diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml
index decce4be..747efd3a 100644
--- a/tests/config/pytest_modules.yml
+++ b/tests/config/pytest_modules.yml
@@ -70,6 +70,10 @@ bcftools/filter:
   - modules/bcftools/filter/**
   - tests/modules/bcftools/filter/**
 
+bcftools/index:
+  - modules/bcftools/index/**
+  - tests/modules/bcftools/index/**
+
 bcftools/isec:
   - modules/bcftools/isec/**
   - tests/modules/bcftools/isec/**
diff --git a/tests/modules/bcftools/index/main.nf b/tests/modules/bcftools/index/main.nf
new file mode 100644
index 00000000..73909d66
--- /dev/null
+++ b/tests/modules/bcftools/index/main.nf
@@ -0,0 +1,23 @@
+#!/usr/bin/env nextflow
+
+nextflow.enable.dsl = 2
+
+include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_CSI } from '../../../../modules/bcftools/index/main.nf' addParams( options: [:] )
+include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_TBI } from '../../../../modules/bcftools/index/main.nf' addParams( options: [args: '-t'] )
+
+
+workflow test_bcftools_index_csi {
+
+    input = [ [ id:'test', single_end:false ], // meta map
+              file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ]
+
+    BCFTOOLS_INDEX_CSI ( input )
+}
+
+workflow test_bcftools_index_tbi {
+
+    input = [ [ id:'test', single_end:false ], // meta map
+              file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ]
+
+    BCFTOOLS_INDEX_TBI ( input )
+}
diff --git a/tests/modules/bcftools/index/test.yml b/tests/modules/bcftools/index/test.yml
new file mode 100644
index 00000000..36c5f3c0
--- /dev/null
+++ b/tests/modules/bcftools/index/test.yml
@@ -0,0 +1,17 @@
+- name: bcftools index
+  command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_csi -c tests/config/nextflow.config
+  tags:
+    - bcftools
+    - bcftools/index
+  files:
+    - path: output/bcftools/test.vcf.gz.csi
+      md5sum: 5f930522d2b9dcdba2807b7da4dfa3fd
+
+- name: bcftools index tbi
+  command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_tbi -c tests/config/nextflow.config
+  tags:
+    - bcftools
+    - bcftools/index
+  files:
+    - path: output/bcftools/test.vcf.gz.tbi
+      md5sum: 36e11bf96ed0af4a92caa91a68612d64

From d1794d19346dcbc0812e97c1e7b5516d19b88d12 Mon Sep 17 00:00:00 2001
From: Mei Wu
Date: Wed, 13 Oct 2021 12:59:35 +0200
Subject: [PATCH 142/314] Add TIDDIT cov (#822)

* added template for tiddit/cov

* test finished

* quick fix to meta info

* Apply suggestions from code review

Co-authored-by: James A. Fellows Yates

* applying suggestions

Co-authored-by: James A. Fellows Yates
Co-authored-by: James A.
Fellows Yates --- modules/tiddit/cov/functions.nf | 78 +++++++++++++++++++++++++++++++ modules/tiddit/cov/main.nf | 49 +++++++++++++++++++ modules/tiddit/cov/meta.yml | 52 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/tiddit/cov/main.nf | 23 +++++++++ tests/modules/tiddit/cov/test.yml | 17 +++++++ 6 files changed, 223 insertions(+) create mode 100644 modules/tiddit/cov/functions.nf create mode 100644 modules/tiddit/cov/main.nf create mode 100644 modules/tiddit/cov/meta.yml create mode 100644 tests/modules/tiddit/cov/main.nf create mode 100644 tests/modules/tiddit/cov/test.yml diff --git a/modules/tiddit/cov/functions.nf b/modules/tiddit/cov/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/tiddit/cov/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/tiddit/cov/main.nf b/modules/tiddit/cov/main.nf new file mode 100644 index 00000000..a3a8a171 --- /dev/null +++ b/modules/tiddit/cov/main.nf @@ -0,0 +1,49 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process TIDDIT_COV { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::tiddit=2.12.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0" + } else { + container "quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0" + } + + input: + tuple val(meta), path(bam) + path fasta + + output: + tuple val(meta), path("*.tab"), optional: true, emit: cov + tuple val(meta), path("*.wig"), optional: true, emit: wig + + path "versions.yml" , emit: versions + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + def reference = fasta ? "--ref $fasta" : "" + """ + tiddit \\ + --cov \\ + -o $prefix \\ + $options.args \\ + --bam $bam \\ + $reference + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/tiddit/cov/meta.yml b/modules/tiddit/cov/meta.yml new file mode 100644 index 00000000..d925b783 --- /dev/null +++ b/modules/tiddit/cov/meta.yml @@ -0,0 +1,52 @@ +name: tiddit_cov +description: Computes the coverage of different regions from the bam file. +keywords: + - coverage + - bam + - statistics + - chromosomal rearrangements +tools: + - tiddit: + description: TIDDIT - structural variant calling. + homepage: https://github.com/SciLifeLab/TIDDIT + documentation: https://github.com/SciLifeLab/TIDDIT/blob/master/README.md + doi: "10.12688/f1000research.11168.1" + licence: ["GPL v3"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM file + pattern: "*.{bam,cram}" + - fasta: + type: file + description: | + Reference genome file. Only needed when passing in CRAM instead of BAM. + If not using CRAM, please pass an empty file instead. + pattern: "*.fasta" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cov: + type: file + description: The coverage of different regions. Optional. + pattern: "*.tab" + - wig: + type: file + description: The coverage of different regions in WIG format. Optional. 
+ pattern: "*.wig" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@projectoriented" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 747efd3a..6bc07f92 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1039,6 +1039,10 @@ tabix/tabix: - modules/tabix/tabix/** - tests/modules/tabix/tabix/** +tiddit/cov: + - modules/tiddit/cov/** + - tests/modules/tiddit/cov/** + tiddit/sv: - modules/tiddit/sv/** - tests/modules/tiddit/sv/** diff --git a/tests/modules/tiddit/cov/main.nf b/tests/modules/tiddit/cov/main.nf new file mode 100644 index 00000000..aed3516c --- /dev/null +++ b/tests/modules/tiddit/cov/main.nf @@ -0,0 +1,23 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { TIDDIT_COV } from '../../../../modules/tiddit/cov/main.nf' addParams( options: [:] ) + +workflow test_tiddit_cov { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + TIDDIT_COV ( input, fasta ) +} + +workflow test_tiddit_cov_no_ref { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + + TIDDIT_COV ( input, [] ) +} diff --git a/tests/modules/tiddit/cov/test.yml b/tests/modules/tiddit/cov/test.yml new file mode 100644 index 00000000..c2aa6439 --- /dev/null +++ b/tests/modules/tiddit/cov/test.yml @@ -0,0 +1,17 @@ +- name: tiddit cov test_tiddit_cov + command: nextflow run tests/modules/tiddit/cov -entry test_tiddit_cov -c tests/config/nextflow.config + tags: + - tiddit + - tiddit/cov + files: + - path: output/tiddit/test.tab + md5sum: f7974948f809f94879d8a60b726194f5 + +- name: tiddit cov test_tiddit_cov_no_ref + command: nextflow run tests/modules/tiddit/cov -entry test_tiddit_cov_no_ref -c tests/config/nextflow.config + tags: + - tiddit + - tiddit/cov + files: + - path: output/tiddit/test.tab + md5sum: f7974948f809f94879d8a60b726194f5 From c912117972a8872b4d045e0bc1a16fb7c14806a6 Mon Sep 17 00:00:00 2001 From: "Moritz E. Beber" Date: Thu, 14 Oct 2021 11:50:31 +0200 Subject: [PATCH 143/314] feat: add sub-workflow for SRA (#836) * feat: add sub-workflow for SRA * Combine prefetch and fasterq-dump into one sub-workflow * tests: add sub-workflow to pytest config --- subworkflows/nf-core/sra_fastq/main.nf | 34 +++++++++++++++++ subworkflows/nf-core/sra_fastq/meta.yml | 37 +++++++++++++++++++ .../nf-core/sra_fastq/nextflow.config | 2 + tests/config/pytest_subworkflows.yml | 5 +++ tests/subworkflows/nf-core/sra_fastq/main.nf | 23 ++++++++++++ tests/subworkflows/nf-core/sra_fastq/test.yml | 25 +++++++++++++ 6 files changed, 126 insertions(+) create mode 100644 subworkflows/nf-core/sra_fastq/main.nf create mode 100644 subworkflows/nf-core/sra_fastq/meta.yml create mode 100644 subworkflows/nf-core/sra_fastq/nextflow.config create mode 100644 tests/subworkflows/nf-core/sra_fastq/main.nf create mode 100644 tests/subworkflows/nf-core/sra_fastq/test.yml diff --git a/subworkflows/nf-core/sra_fastq/main.nf b/subworkflows/nf-core/sra_fastq/main.nf new file mode 100644 index 00000000..ffa380d9 --- /dev/null +++ b/subworkflows/nf-core/sra_fastq/main.nf @@ -0,0 +1,34 @@ +// +// Download FASTQ sequencing reads from the NCBI's Sequence Read Archive (SRA). 
+// + +params.prefetch_options = [:] +params.fasterqdump_options = [:] + +include { SRATOOLS_PREFETCH } from '../../../modules/sratools/prefetch/main' addParams( options: params.prefetch_options ) +include { SRATOOLS_FASTERQDUMP } from '../../../modules/sratools/fasterqdump/main' addParams( options: params.fasterqdump_options ) + +workflow SRA_FASTQ { + take: + sra_ids // channel: [ val(meta), val(id) ] + + main: + + ch_versions = Channel.empty() + + // + // Prefetch sequencing reads in SRA format. + // + SRATOOLS_PREFETCH ( sra_ids ) + ch_versions = ch_versions.mix( SRATOOLS_PREFETCH.out.versions.first() ) + + // + // Convert the SRA format into one or more compressed FASTQ files. + // + SRATOOLS_FASTERQDUMP ( SRATOOLS_PREFETCH.out.sra ) + ch_versions = ch_versions.mix( SRATOOLS_FASTERQDUMP.out.versions.first() ) + + emit: + reads = SRATOOLS_FASTERQDUMP.out.reads // channel: [ val(meta), [ reads ] ] + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/sra_fastq/meta.yml b/subworkflows/nf-core/sra_fastq/meta.yml new file mode 100644 index 00000000..3db93257 --- /dev/null +++ b/subworkflows/nf-core/sra_fastq/meta.yml @@ -0,0 +1,37 @@ +name: sra_fastq +description: Download FASTQ sequencing reads from the NCBI's Sequence Read Archive (SRA). +keywords: + - sequencing + - FASTQ + - prefetch + - dump +modules: + - sratools/prefetch + - sratools/fasterqdump +input: + - meta: + type: map + description: > + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - id: + type: string + description: > + SRA identifier. +# TODO Update when we decide on a standard for subworkflow docs +output: + - meta: + type: map + description: > + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: Extracted FASTQ file or files if the sequencing reads are paired-end. 
+ pattern: "*.fastq.gz" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - '@Midnighter' diff --git a/subworkflows/nf-core/sra_fastq/nextflow.config b/subworkflows/nf-core/sra_fastq/nextflow.config new file mode 100644 index 00000000..07448834 --- /dev/null +++ b/subworkflows/nf-core/sra_fastq/nextflow.config @@ -0,0 +1,2 @@ +params.prefetch_options = [:] +params.fasterqdump_options = [:] diff --git a/tests/config/pytest_subworkflows.yml b/tests/config/pytest_subworkflows.yml index a8ac84dc..84919be8 100644 --- a/tests/config/pytest_subworkflows.yml +++ b/tests/config/pytest_subworkflows.yml @@ -9,3 +9,8 @@ subworkflows/bam_stats_samtools: subworkflows/bam_sort_samtools: - subworkflows/nf-core/bam_sort_samtools/** - tests/subworkflows/nf-core/bam_sort_samtools/** + +subworkflows/sra_fastq: + - subworkflows/nf-core/sra_fastq/** + - tests/subworkflows/nf-core/sra_fastq/** + diff --git a/tests/subworkflows/nf-core/sra_fastq/main.nf b/tests/subworkflows/nf-core/sra_fastq/main.nf new file mode 100644 index 00000000..988758f3 --- /dev/null +++ b/tests/subworkflows/nf-core/sra_fastq/main.nf @@ -0,0 +1,23 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SRA_FASTQ } from '../../../../subworkflows/nf-core/sra_fastq/main.nf' addParams( [:] ) + +workflow test_sra_fastq_single_end { + input = [ + [ id:'test_single_end', single_end:true ], // meta map + 'SRR13255544' + ] + + SRA_FASTQ ( input ) +} + +workflow test_sra_fastq_paired_end { + input = [ + [ id:'test_paired_end', single_end:false ], // meta map + 'SRR11140744' + ] + + SRA_FASTQ ( input ) +} diff --git a/tests/subworkflows/nf-core/sra_fastq/test.yml b/tests/subworkflows/nf-core/sra_fastq/test.yml new file mode 100644 index 00000000..6f953ccf --- /dev/null +++ b/tests/subworkflows/nf-core/sra_fastq/test.yml @@ -0,0 +1,25 @@ +- name: sra fastq single-end + command: nextflow run ./tests/subworkflows/nf-core/sra_fastq -entry test_sra_fastq_single_end -c tests/config/nextflow.config + tags: + - subworkflows/sra_fastq + # Modules + - sratools + - sratools/prefetch + - sratools/fasterqdump + files: + - path: output/sratools/SRR13255544.fastq.gz + md5sum: 1054c7b71884acdb5eed8a378f18be82 + +- name: sra fastq paired-end + command: nextflow run ./tests/subworkflows/nf-core/sra_fastq -entry test_sra_fastq_paired_end -c tests/config/nextflow.config + tags: + - subworkflows/sra_fastq + # Modules + - sratools + - sratools/prefetch + - sratools/fasterqdump + files: + - path: output/sratools/SRR11140744_1.fastq.gz + md5sum: 193809c784a4ea132ab2a253fa4f55b6 + - path: output/sratools/SRR11140744_2.fastq.gz + md5sum: 3e3b3af3413f50a1685fd7b3f1456d4e From 1cf207a5b634223124ebe9dfa65e293019beda1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Thu, 14 Oct 2021 11:07:53 +0100 Subject: [PATCH 144/314] Update `isoseq3/cluster` (#856) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: Add isoseq3/cluster module * 🐛FIX: Fix reports channel and add .pbi to it * 🐛FIX: Fix report channel definition * 👌IMPROVE: Move .pbi file into reports channel * 👌IMPROVE: remove --use_qvs option from command line * 👌 IMPROVE: Add in addParams removed options from command line * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The module accept one channel (primers moved into the first 
channel) * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: Ignore test data * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: add singletons parameter and improve outputs * 🐛 FIX: Update test with last module model * 👌 IMPROVE: Add test tag * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update test data config * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Remove unused index * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Fill contains args * 📦 NEW: Add isoseq3/cluster module * 🐛FIX: Fix reports channel and add .pbi to it * 🐛FIX: Fix report channel definition * 👌IMPROVE: Move .pbi file into reports channel * 👌IMPROVE: remove --use_qvs option from command line * 👌 IMPROVE: Add in addParams removed options from command line * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 👌 IMPROVE: The module accept one channel (primers moved into the first channel) * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: add singletons parameter and improve outputs * 🐛 FIX: Update test with last module model * 👌 IMPROVE: Add test tag * 👌 IMPROVE: Update test data config * 👌 IMPROVE: Remove pbi from input files * 👌 IMPROVE: Remove unused index * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Fill contains args * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: Ignore test data * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update code to new versions capture + better output channels * 👌 IMPROVE: Update with new versions.yml file * 🐛 FIX: Update meta.yml + correct typos * 👌 IMPROVE: Clean output file names + correct typo * 🐛 FIX: Remove bamtools/split module from isoseq3/cluster * 🐛 FIX: Update output filename pattern input filename and output filename were the same * 👌 IMPROVE: Update meta.yml --- modules/isoseq3/cluster/main.nf | 27 +++++++++--------- modules/isoseq3/cluster/meta.yml | 34 +++++++++++------------ tests/modules/isoseq3/cluster/test.yml | 38 +++++++++++++------------- 3 files changed, 50 insertions(+), 49 deletions(-) diff --git a/modules/isoseq3/cluster/main.nf b/modules/isoseq3/cluster/main.nf index f01af2bc..df005706 100644 --- a/modules/isoseq3/cluster/main.nf +++ b/modules/isoseq3/cluster/main.nf @@ -22,19 +22,20 @@ process ISOSEQ3_CLUSTER { tuple val(meta), path(bam) output: - tuple val(meta), path("*.bam") , emit: bam - tuple val(meta), path("*.bam.pbi") , emit: pbi - tuple val(meta), path("*.cluster") , emit: cluster - tuple val(meta), path("*.cluster_report.csv"), emit: cluster_report - tuple val(meta), path("*.transcriptset.xml") , emit: transcriptset - tuple val(meta), path("*.hq.bam") , emit: hq_bam - tuple val(meta), path("*.hq.bam.pbi") , 
emit: hq_pbi - tuple val(meta), path("*.lq.bam") , emit: lq_bam - tuple val(meta), path("*.lq.bam.pbi") , emit: lq_pbi - path "versions.yml" , emit: versions + tuple val(meta), path("*.transcripts.bam") , emit: bam + tuple val(meta), path("*.transcripts.bam.pbi") , emit: pbi + tuple val(meta), path("*.transcripts.cluster") , emit: cluster + tuple val(meta), path("*.transcripts.cluster_report.csv"), emit: cluster_report + tuple val(meta), path("*.transcripts.transcriptset.xml") , emit: transcriptset + path "versions.yml" , emit: versions + + tuple val(meta), path("*.transcripts.hq.bam") , optional: true, emit: hq_bam + tuple val(meta), path("*.transcripts.hq.bam.pbi") , optional: true, emit: hq_pbi + tuple val(meta), path("*.transcripts.lq.bam") , optional: true, emit: lq_bam + tuple val(meta), path("*.transcripts.lq.bam.pbi") , optional: true, emit: lq_pbi + tuple val(meta), path("*.transcripts.singletons.bam") , optional: true, emit: singletons_bam + tuple val(meta), path("*.transcripts.singletons.bam.pbi"), optional: true, emit: singletons_pbi - tuple val(meta), path("*.singletons.bam") , optional: true, emit: singletons_bam - tuple val(meta), path("*.singletons.bam.pbi"), optional: true, emit: singletons_pbi script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" @@ -42,7 +43,7 @@ process ISOSEQ3_CLUSTER { isoseq3 \\ cluster \\ $bam \\ - ${prefix}.bam \\ + ${prefix}.transcripts.bam \\ $options.args cat <<-END_VERSIONS > versions.yml diff --git a/modules/isoseq3/cluster/meta.yml b/modules/isoseq3/cluster/meta.yml index 6fadb9c4..280e0150 100644 --- a/modules/isoseq3/cluster/meta.yml +++ b/modules/isoseq3/cluster/meta.yml @@ -35,47 +35,47 @@ output: - bam: type: file description: BAM file of clustered consensus - pattern: "*.bam" + pattern: "*.transcripts.bam" - pbi: type: file description: Pacbio Index of consensus reads generated by clustering - pattern: "*.pbi" + pattern: "*.transcripts.bam.pbi" - cluster: type: file description: A two columns (from, to) file describing original read name to new read name - pattern: "*.cluster" + pattern: "*.transcripts.cluster" - cluster_report: type: file description: A table files clusters (transcripts) members (read) - pattern: "*.cluster_report.csv" + pattern: "*.transcripts.cluster_report.csv" - transcriptset: type: file description: A metadata xml file which contains full paths to data files - pattern: "*.clustered.transcriptset.xml" + pattern: "*.transcripts.transcriptset.xml" - hq_bam: type: file - description: High quality reads - pattern: "*.hq.bam" + description: High quality reads (when --use-qvs is set) + pattern: "*.transcripts.hq.bam" - hq_pbi: type: file - description: Pacbio index of high quality reads - pattern: "*.hq.bam.pbi" + description: Pacbio index of high quality reads (when --use-qvs is set) + pattern: "*.transcripts.hq.bam.pbi" - lq_bam: type: file - description: Low quality reads - pattern: "*.lq.bam" + description: Low quality reads (when --use-qvs is set) + pattern: "*.transcripts.lq.bam" - lq_pbi: type: file - description: Pacbio index of low quality reads - pattern: "*.lq.bam.pbi" + description: Pacbio index of low quality reads (when --use-qvs is set) + pattern: "*.transcripts.lq.bam.pbi" - singletons_bam: type: file - description: Unclustered reads - pattern: "*.singletons.bam" + description: Unclustered reads (when --singletons is set) + pattern: "*.transcripts.singletons.bam" - singletons_pbi: type: file - description: Pacbio index of unclustered reads - pattern: "*.singletons.bam.pbi" + 
description: Pacbio index of unclustered reads (when --singletons is set) + pattern: "*.transcripts.singletons.bam.pbi" authors: - "@sguizard" diff --git a/tests/modules/isoseq3/cluster/test.yml b/tests/modules/isoseq3/cluster/test.yml index cc6b6dac..58b20ae2 100644 --- a/tests/modules/isoseq3/cluster/test.yml +++ b/tests/modules/isoseq3/cluster/test.yml @@ -4,25 +4,25 @@ - isoseq3 - isoseq3/cluster files: - - path: output/isoseq3/test.bam - md5sum: ca8277f4d8fe1bba68ba266c42b46dd1 - - path: output/isoseq3/test.bam.pbi - md5sum: cbc06657b4543faba7ff886b3b12b862 - - path: output/isoseq3/test.cluster + - path: output/isoseq3/test.transcripts.bam + md5sum: eb36697688099c757ef4196f54ad7d7a + - path: output/isoseq3/test.transcripts.bam.pbi + md5sum: db70cee03421822e9b8f9fb6b228f461 + - path: output/isoseq3/test.transcripts.cluster md5sum: d5059d856763fc5591332980bfc0d57b - - path: output/isoseq3/test.cluster_report.csv + - path: output/isoseq3/test.transcripts.cluster_report.csv md5sum: 342d97dc10aedf80a45977edcb491c62 - - path: output/isoseq3/test.hq.bam - md5sum: e93ea85776c35c246364d954032c2ea9 - - path: output/isoseq3/test.hq.bam.pbi - md5sum: 5a8ea7668e8f8e173478b28cbb6ab515 - - path: output/isoseq3/test.lq.bam - md5sum: 4ea0e4f4a6cc689dcc275adcdf688fad - - path: output/isoseq3/test.lq.bam.pbi - md5sum: f5edc24711b2c8d6474d60cb69022af0 - - path: output/isoseq3/test.singletons.bam - md5sum: 73d131920bd42e1fc5fca2e6cb71f4b2 - - path: output/isoseq3/test.singletons.bam.pbi - md5sum: 73980863be4b5bda2846325c737f0b5e - - path: output/isoseq3/test.transcriptset.xml + - path: output/isoseq3/test.transcripts.hq.bam + md5sum: 4c5f4ffb429107c9c9578419e07d6987 + - path: output/isoseq3/test.transcripts.hq.bam.pbi + md5sum: 39ac3e957c8d55b0ce3f425d63baa154 + - path: output/isoseq3/test.transcripts.lq.bam + md5sum: 710ca4bc58ac039d76775460e1c822a0 + - path: output/isoseq3/test.transcripts.lq.bam.pbi + md5sum: 9b32036553bf3dced3065e6d0f36aef9 + - path: output/isoseq3/test.transcripts.singletons.bam + md5sum: 0288577406b9d1e7356de9b4d7c2bb7c + - path: output/isoseq3/test.transcripts.singletons.bam.pbi + md5sum: 8e0e0681179c0c36209b49fa60783841 + - path: output/isoseq3/test.transcripts.transcriptset.xml contains: [ 'PacBio.DataSet.TranscriptSet' ] From d0e4ee3ccd5628e412ea0120eb28266e936c6b0e Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 15 Oct 2021 10:40:16 +0200 Subject: [PATCH 145/314] Add cram files to config (#862) * add cram files to config * Update tests/config/test_data.config Co-authored-by: Harshil Patel --- tests/config/test_data.config | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/config/test_data.config b/tests/config/test_data.config index b4443e9f..50fdb858 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -143,6 +143,20 @@ params { test2_paired_end_umi_unsorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test2.paired_end.umi_unsorted.bam" test2_paired_end_umi_unsorted_tagged_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test2.paired_end.unsorted_tagged.bam" + test_paired_end_sorted_cram = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test.paired_end.sorted.cram" + test_paired_end_sorted_cram_crai = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test.paired_end.sorted.cram.crai" + test_paired_end_markduplicates_sorted_cram = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test.paired_end.markduplicates.sorted.cram" + test_paired_end_markduplicates_sorted_cram_crai 
= "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test.paired_end.markduplicates.sorted.cram.crai" + test_paired_end_recalibrated_sorted_cram = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test.paired_end.recalibrated.sorted.cram" + test_paired_end_recalibrated_sorted_cram_crai = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test.paired_end.recalibrated.sorted.cram.crai" + + test2_paired_end_sorted_cram = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test2.paired_end.sorted.cram" + test2_paired_end_sorted_cram_crai = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test2.paired_end.sorted.cram.crai" + test2_paired_end_markduplicates_sorted_cram = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test2.paired_end.markduplicates.sorted.cram" + test2_paired_end_markduplicates_sorted_cram_crai = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test2.paired_end.markduplicates.sorted.cram.crai" + test2_paired_end_recalibrated_sorted_cram = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test2.paired_end.recalibrated.sorted.cram" + test2_paired_end_recalibrated_sorted_cram_crai = "${test_data_dir}/genomics/homo_sapiens/illumina/cram/test2.paired_end.recalibrated.sorted.cram.crai" + test_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test_1.fastq.gz" test_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test_2.fastq.gz" test_umi_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test.umi_1.fastq.gz" From b6486ef8b4355ae75c7ba7c608ee41d07aab23f6 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 15 Oct 2021 11:10:53 +0200 Subject: [PATCH 146/314] Add compressed bed files (#864) * add cram files to config * Update tests/config/test_data.config * Add compressed bed file Co-authored-by: Harshil Patel --- tests/config/test_data.config | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 50fdb858..5381a311 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -104,6 +104,8 @@ params { genome_gtf = "${test_data_dir}/genomics/homo_sapiens/genome/genome.gtf" genome_sizes = "${test_data_dir}/genomics/homo_sapiens/genome/genome.sizes" genome_bed = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed" + genome_bed_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz" + genome_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz.tbi" transcriptome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/transcriptome.fasta" genome2_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome2.fasta" From cbfc8eb46c67e539aa4e06015466f250f2070e5f Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 18 Oct 2021 15:24:49 +0200 Subject: [PATCH 147/314] Pydamage analyzer update (#863) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Update md5sum due to test-dataset update Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry --- tests/modules/pydamage/analyze/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/pydamage/analyze/test.yml b/tests/modules/pydamage/analyze/test.yml index e480c1b4..157e947f 100644 --- a/tests/modules/pydamage/analyze/test.yml +++ b/tests/modules/pydamage/analyze/test.yml @@ -5,4 +5,4 @@ - pydamage/analyze files: - path: output/pydamage/pydamage_results/pydamage_results.csv - md5sum: 6847e0d5aa6dba85bbd2dd509772b7a0 + md5sum: 37ee6b4dee6890fd2ec8550337f21ac9 From 4e9e732b76d2117bf9f6ff2afbd3950582429f07 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Mon, 18 Oct 2021 22:34:23 +0100 Subject: [PATCH 148/314] Add strelka/somatic module (#866) * Add strelka/somatic module * Fill out meta.yml properly --- modules/strelka/somatic/functions.nf | 78 +++++++++++++++++++++++ modules/strelka/somatic/main.nf | 59 ++++++++++++++++++ modules/strelka/somatic/meta.yml | 85 ++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 12 ++-- tests/modules/strelka/somatic/main.nf | 23 +++++++ tests/modules/strelka/somatic/test.yml | 12 ++++ 6 files changed, 265 insertions(+), 4 deletions(-) create mode 100644 modules/strelka/somatic/functions.nf create mode 100644 modules/strelka/somatic/main.nf create mode 100644 modules/strelka/somatic/meta.yml create mode 100644 tests/modules/strelka/somatic/main.nf create mode 100644 tests/modules/strelka/somatic/test.yml diff --git a/modules/strelka/somatic/functions.nf b/modules/strelka/somatic/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/strelka/somatic/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf new file mode 100644 index 00000000..35e7053f --- /dev/null +++ b/modules/strelka/somatic/main.nf @@ -0,0 +1,59 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process STRELKA_SOMATIC { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::strelka=2.9.10" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1" + } else { + container "quay.io/biocontainers/strelka:2.9.10--h9ee0642_1" + } + + input: + tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor) + path fasta + path fai + path target_bed + path target_bed_tbi + + output: + tuple val(meta), path("*.somatic_indels.vcf.gz") , emit: vcf_indels + tuple val(meta), path("*.somatic_indels.vcf.gz.tbi"), emit: vcf_indels_tbi + tuple val(meta), path("*.somatic_snvs.vcf.gz") , emit: vcf_snvs + tuple val(meta), path("*.somatic_snvs.vcf.gz.tbi") , emit: vcf_snvs_tbi + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def options_strelka = params.target_bed ? 
"--exome --callRegions ${target_bed}" : "" + """ + configureStrelkaSomaticWorkflow.py \\ + --tumor $cram_tumor \\ + --normal $cram_normal \\ + --referenceFasta $fasta \\ + $options_strelka \\ + $options.args \\ + --runDir strelka + + python strelka/runWorkflow.py -m local -j $task.cpus + + mv strelka/results/variants/somatic.indels.vcf.gz ${prefix}.somatic_indels.vcf.gz + mv strelka/results/variants/somatic.indels.vcf.gz.tbi ${prefix}.somatic_indels.vcf.gz.tbi + mv strelka/results/variants/somatic.snvs.vcf.gz ${prefix}.somatic_snvs.vcf.gz + mv strelka/results/variants/somatic.snvs.vcf.gz.tbi ${prefix}.somatic_snvs.vcf.gz.tbi + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( configureStrelkaSomaticWorkflow.py --version ) + END_VERSIONS + """ +} diff --git a/modules/strelka/somatic/meta.yml b/modules/strelka/somatic/meta.yml new file mode 100644 index 00000000..d9bd993a --- /dev/null +++ b/modules/strelka/somatic/meta.yml @@ -0,0 +1,85 @@ +name: strelka_somatic +description: Strelka2 is a fast and accurate small variant caller optimized for analysis of germline variation in small cohorts and somatic variation in tumor/normal sample pairs +keywords: + - variant calling + - germline + - wgs + - vcf + - variants +tools: + - strelka: + description: Strelka calls somatic and germline small variants from mapped sequencing reads + homepage: https://github.com/Illumina/strelka + documentation: https://github.com/Illumina/strelka/blob/v2.9.x/docs/userGuide/README.md + tool_dev_url: https://github.com/Illumina/strelka + doi: 10.1038/s41592-018-0051-x + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cram_normal: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - crai_normal: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" + - cram_tumor: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - crai_tumor: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" + - fasta: + type: file + description: Genome reference FASTA file + pattern: "*.{fa,fasta}" + - fai: + type: file + description: Genome reference FASTA index file + pattern: "*.{fa.fai,fasta.fai}" + - target_bed: + type: file + description: BED file containing target regions for variant calling + pattern: "*.{bed}" + - target_bed_tbi: + type: file + description: Index for BED file containing target regions for variant calling + pattern: "*.{bed.tbi}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - vcf_indels: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - vcf_indels_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - vcf_snvs: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - vcf_snvs_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@drpatelh" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6bc07f92..ed727e7c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -991,14 +991,14 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/prefetch: - - modules/sratools/prefetch/** - - tests/modules/sratools/prefetch/** - sratools/fasterqdump: - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** +sratools/prefetch: + - modules/sratools/prefetch/** + - tests/modules/sratools/prefetch/** + staphopiasccmec: - modules/staphopiasccmec/** - tests/modules/staphopiasccmec/** @@ -1015,6 +1015,10 @@ strelka/germline: - modules/strelka/germline/** - tests/modules/strelka/germline/** +strelka/somatic: + - modules/strelka/somatic/** + - tests/modules/strelka/somatic/** + stringtie/merge: - modules/stringtie/merge/** - tests/modules/stringtie/merge/** diff --git a/tests/modules/strelka/somatic/main.nf b/tests/modules/strelka/somatic/main.nf new file mode 100644 index 00000000..8dec808e --- /dev/null +++ b/tests/modules/strelka/somatic/main.nf @@ -0,0 +1,23 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { STRELKA_SOMATIC } from '../../../../modules/strelka/somatic/main.nf' addParams( options: [:] ) + +workflow test_strelka_somatic { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + + STRELKA_SOMATIC ( input, fasta, fai, bed, bed_tbi ) +} diff --git a/tests/modules/strelka/somatic/test.yml b/tests/modules/strelka/somatic/test.yml new file mode 100644 index 00000000..f98b7232 --- /dev/null +++ b/tests/modules/strelka/somatic/test.yml @@ -0,0 +1,12 @@ +- name: strelka somatic test_strelka_somatic + command: nextflow run tests/modules/strelka/somatic -entry test_strelka_somatic -c tests/config/nextflow.config + tags: + - strelka + - strelka/somatic + files: + - path: output/strelka/test.somatic_indels.vcf.gz + - path: output/strelka/test.somatic_indels.vcf.gz.tbi + md5sum: 4cb176febbc8c26d717a6c6e67b9c905 + - path: 
output/strelka/test.somatic_snvs.vcf.gz + - path: output/strelka/test.somatic_snvs.vcf.gz.tbi + md5sum: 4cb176febbc8c26d717a6c6e67b9c905 From 97fe899f792b2188737cb0f0075219feb22c2b2c Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 20 Oct 2021 10:02:30 +0200 Subject: [PATCH 149/314] fix: reduce number of required input files for damage profiler (#612) * Reduce number of required input files for damage profiler * Remove rebugging * Add optional species list file. * Working pending updated test-dataset update * Add genome header to config --- modules/damageprofiler/main.nf | 17 +++-- modules/damageprofiler/meta.yml | 8 ++- tests/config/test_data.config | 26 ++++---- tests/modules/damageprofiler/main.nf | 34 ++++++++-- tests/modules/damageprofiler/test.yml | 94 ++++++++++++++++++++++++--- 5 files changed, 144 insertions(+), 35 deletions(-) diff --git a/modules/damageprofiler/main.nf b/modules/damageprofiler/main.nf index 1537b019..3800a305 100644 --- a/modules/damageprofiler/main.nf +++ b/modules/damageprofiler/main.nf @@ -22,25 +22,30 @@ process DAMAGEPROFILER { tuple val(meta), path(bam) path fasta path fai + path specieslist output: tuple val(meta), path("${prefix}"), emit: results path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def software = getSoftwareName(task.process) + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def reference = fasta ? "-r $fasta" : "" + def species_list = specieslist ? "-sf $specieslist" : "" """ damageprofiler \\ - -i $bam \\ - -r $fasta \\ - -o $prefix/ \\ - $options.args - + -i $bam \\ + -o $prefix/ \\ + $options.args \\ + $reference \\ + $species_list cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(damageprofiler -v | sed 's/^DamageProfiler v//') END_VERSIONS """ + } diff --git a/modules/damageprofiler/meta.yml b/modules/damageprofiler/meta.yml index ff82ba09..19ba908f 100644 --- a/modules/damageprofiler/meta.yml +++ b/modules/damageprofiler/meta.yml @@ -32,12 +32,16 @@ input: pattern: "*.{bam,cram,sam}" - fasta: type: file - description: FASTA reference file + description: OPTIONAL FASTA reference file pattern: "*.{fasta,fna,fa}" - fai: type: file - description: FASTA index file from samtools faidx + description: OPTIONAL FASTA index file from samtools faidx pattern: "*.{fai}" + - specieslist: + type: file + description: OPTIONAL text file with list of target reference headers + pattern: "*.{txt}" output: - versions: diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 5381a311..6abfa4f8 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -104,6 +104,7 @@ params { genome_gtf = "${test_data_dir}/genomics/homo_sapiens/genome/genome.gtf" genome_sizes = "${test_data_dir}/genomics/homo_sapiens/genome/genome.sizes" genome_bed = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed" + genome_header = "${test_data_dir}/genomics/homo_sapiens/genome/genome.header" genome_bed_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz" genome_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz.tbi" transcriptome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/transcriptome.fasta" @@ -119,18 +120,19 @@ params { repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" } 'illumina' { - test_paired_end_sorted_bam = 
"${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam" - test_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam.bai" - test_paired_end_markduplicates_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.markduplicates.sorted.bam" - test_paired_end_markduplicates_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.markduplicates.sorted.bam.bai" - test_paired_end_recalibrated_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.recalibrated.sorted.bam" - test_paired_end_recalibrated_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.recalibrated.sorted.bam.bai" - test_paired_end_umi_consensus_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_consensus.bam" - test_paired_end_umi_converted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_converted.bam" - test_paired_end_umi_grouped_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_grouped.bam" - test_paired_end_umi_histogram_txt = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_histogram.txt" - test_paired_end_umi_unsorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_unsorted.bam" - test_paired_end_umi_unsorted_tagged_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.unsorted_tagged.bam" + test_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam" + test_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam.bai" + test_paired_end_markduplicates_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.markduplicates.sorted.bam" + test_paired_end_markduplicates_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.markduplicates.sorted.bam.bai" + test_paired_end_markduplicates_sorted_referencesn_txt = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.markduplicates.sorted.referencesn.txt" + test_paired_end_recalibrated_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.recalibrated.sorted.bam" + test_paired_end_recalibrated_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.recalibrated.sorted.bam.bai" + test_paired_end_umi_consensus_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_consensus.bam" + test_paired_end_umi_converted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_converted.bam" + test_paired_end_umi_grouped_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_grouped.bam" + test_paired_end_umi_histogram_txt = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_histogram.txt" + test_paired_end_umi_unsorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_unsorted.bam" + test_paired_end_umi_unsorted_tagged_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.unsorted_tagged.bam" test2_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam" test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam.bai" diff 
--git a/tests/modules/damageprofiler/main.nf b/tests/modules/damageprofiler/main.nf index 5b128770..36ae7b24 100644 --- a/tests/modules/damageprofiler/main.nf +++ b/tests/modules/damageprofiler/main.nf @@ -6,10 +6,34 @@ include { DAMAGEPROFILER } from '../../../modules/damageprofiler/main.nf' addPar workflow test_damageprofiler { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_markduplicates_sorted_bam'], checkIfExists: true) ] ] + fasta = [] + fai = [] + species_list = [] - DAMAGEPROFILER ( input, fasta, fai ) + + DAMAGEPROFILER ( input, fasta, fai, species_list ) +} + +workflow test_damageprofiler_reference { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_markduplicates_sorted_bam'], checkIfExists: true) ] ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + species_list = [] + + DAMAGEPROFILER ( input, fasta, fai, species_list ) +} + +workflow test_damageprofiler_specieslist { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_markduplicates_sorted_bam'], checkIfExists: true) ] ] + fasta = [] + fai = [] + species_list = file(params.test_data['homo_sapiens']['genome']['genome_header'], checkIfExists: true) + + DAMAGEPROFILER ( input, fasta, fai, species_list ) } diff --git a/tests/modules/damageprofiler/test.yml b/tests/modules/damageprofiler/test.yml index 357647be..9ef964dc 100644 --- a/tests/modules/damageprofiler/test.yml +++ b/tests/modules/damageprofiler/test.yml @@ -4,13 +4,13 @@ - damageprofiler files: - path: output/damageprofiler/test/3p_freq_misincorporations.txt - md5sum: da4cac90c78899a7cb6d72d415392b49 + md5sum: de3b84d946a6b63cdcfadf82bf6854c0 - path: output/damageprofiler/test/3pGtoA_freq.txt - md5sum: 8dab75d51a4b943b501d0995169c767f + md5sum: 61c903b1504ed7d7182570dfc75e4498 - path: output/damageprofiler/test/5pCtoT_freq.txt - md5sum: fcc48ee5f72edff930d627c8bfdd8a5b + md5sum: 15a75b60ee519b61ce04a83fe3afe855 - path: output/damageprofiler/test/5p_freq_misincorporations.txt - md5sum: 54665474f5ef17dcc268567e5eaa7d86 + md5sum: 3b3240d6c1a3491e461b39199a9fcfe3 - path: output/damageprofiler/test/DamagePlot_five_prime.svg - path: output/damageprofiler/test/DamagePlot.pdf - path: output/damageprofiler/test/DamagePlot_three_prime.svg @@ -18,19 +18,93 @@ contains: - "FINISHED SUCCESSFULLY" - path: output/damageprofiler/test/dmgprof.json - md5sum: 98499024c7e937896e481f2d3cfbdd3e + md5sum: 2e54e712d2ae9e32c4c298e5fd8f60fe - path: output/damageprofiler/test/DNA_comp_genome.txt - md5sum: f91e70760d91a1193a27e360aaddf2fd + md5sum: fea48af1ecf491b439d36d4a919473df - path: output/damageprofiler/test/DNA_composition_sample.txt - md5sum: 1257eb3eb42484647bfba2151f9ef04f + md5sum: 9e17a0b1e5ad4eb13201cd24ad8507dd - path: output/damageprofiler/test/edit_distance.pdf - path: output/damageprofiler/test/edit_distance.svg - path: 
output/damageprofiler/test/editDistance.txt - md5sum: af2d2f4a99058ec56eae88ec27779e38 + md5sum: 04d14b449a5afa8b5dbff0dfa762356b - path: output/damageprofiler/test/Length_plot_combined_data.svg - path: output/damageprofiler/test/Length_plot_forward_reverse_separated.svg - path: output/damageprofiler/test/Length_plot.pdf - path: output/damageprofiler/test/lgdistribution.txt - md5sum: c5d029bf3a92b613310ee23f47d94981 + md5sum: df2e19195185ea9ee05e8e84b2948f36 - path: output/damageprofiler/test/misincorporation.txt - md5sum: 3aa6dd749010a492d92a815a83c196a8 + md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c + +- name: damageprofiler_reference + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_reference -c tests/config/nextflow.config -dump-channels + tags: + - damageprofiler + files: + - path: output/damageprofiler/test/3p_freq_misincorporations.txt + md5sum: de3b84d946a6b63cdcfadf82bf6854c0 + - path: output/damageprofiler/test/3pGtoA_freq.txt + md5sum: 61c903b1504ed7d7182570dfc75e4498 + - path: output/damageprofiler/test/5pCtoT_freq.txt + md5sum: 15a75b60ee519b61ce04a83fe3afe855 + - path: output/damageprofiler/test/5p_freq_misincorporations.txt + md5sum: 3b3240d6c1a3491e461b39199a9fcfe3 + - path: output/damageprofiler/test/DamagePlot_five_prime.svg + - path: output/damageprofiler/test/DamagePlot.pdf + - path: output/damageprofiler/test/DamagePlot_three_prime.svg + - path: output/damageprofiler/test/DamageProfiler.log + contains: + - "FINISHED SUCCESSFULLY" + - path: output/damageprofiler/test/dmgprof.json + md5sum: 2e54e712d2ae9e32c4c298e5fd8f60fe + - path: output/damageprofiler/test/DNA_comp_genome.txt + md5sum: fea48af1ecf491b439d36d4a919473df + - path: output/damageprofiler/test/DNA_composition_sample.txt + md5sum: 9e17a0b1e5ad4eb13201cd24ad8507dd + - path: output/damageprofiler/test/edit_distance.pdf + - path: output/damageprofiler/test/edit_distance.svg + - path: output/damageprofiler/test/editDistance.txt + md5sum: 04d14b449a5afa8b5dbff0dfa762356b + - path: output/damageprofiler/test/Length_plot_combined_data.svg + - path: output/damageprofiler/test/Length_plot_forward_reverse_separated.svg + - path: output/damageprofiler/test/Length_plot.pdf + - path: output/damageprofiler/test/lgdistribution.txt + md5sum: df2e19195185ea9ee05e8e84b2948f36 + - path: output/damageprofiler/test/misincorporation.txt + md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c + +- name: damageprofiler_specieslist + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_specieslist -c tests/config/nextflow.config -dump-channels + tags: + - damageprofiler + files: + - path: output/damageprofiler/test/chr22/3p_freq_misincorporations.txt + md5sum: de3b84d946a6b63cdcfadf82bf6854c0 + - path: output/damageprofiler/test/chr22/3pGtoA_freq.txt + md5sum: 61c903b1504ed7d7182570dfc75e4498 + - path: output/damageprofiler/test/chr22/5pCtoT_freq.txt + md5sum: 15a75b60ee519b61ce04a83fe3afe855 + - path: output/damageprofiler/test/chr22/5p_freq_misincorporations.txt + md5sum: 3b3240d6c1a3491e461b39199a9fcfe3 + - path: output/damageprofiler/test/chr22/DamagePlot_five_prime.svg + - path: output/damageprofiler/test/chr22/DamagePlot.pdf + - path: output/damageprofiler/test/chr22/DamagePlot_three_prime.svg + - path: output/damageprofiler/test/DamageProfiler.log + contains: + - "FINISHED SUCCESSFULLY" + - path: output/damageprofiler/test/chr22/dmgprof.json + md5sum: 2e54e712d2ae9e32c4c298e5fd8f60fe + - path: output/damageprofiler/test/chr22/DNA_comp_genome.txt + md5sum: 
fea48af1ecf491b439d36d4a919473df + - path: output/damageprofiler/test/chr22/DNA_composition_sample.txt + md5sum: 9e17a0b1e5ad4eb13201cd24ad8507dd + - path: output/damageprofiler/test/chr22/edit_distance.pdf + - path: output/damageprofiler/test/chr22/edit_distance.svg + - path: output/damageprofiler/test/chr22/editDistance.txt + md5sum: 04d14b449a5afa8b5dbff0dfa762356b + - path: output/damageprofiler/test/chr22/Length_plot_combined_data.svg + - path: output/damageprofiler/test/chr22/Length_plot_forward_reverse_separated.svg + - path: output/damageprofiler/test/chr22/Length_plot.pdf + - path: output/damageprofiler/test/chr22/lgdistribution.txt + md5sum: df2e19195185ea9ee05e8e84b2948f36 + - path: output/damageprofiler/test/chr22/misincorporation.txt + md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c From eb04a0f1f69a687b596109efb80f28c17b667eba Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Wed, 20 Oct 2021 15:19:31 +0200 Subject: [PATCH 150/314] New module: freebayes (#818) * add pydamage module Co-authored-by: James A. Fellows Yates --- modules/freebayes/functions.nf | 78 +++++++++++++++++++++++++++++++ modules/freebayes/main.nf | 79 ++++++++++++++++++++++++++++++++ modules/freebayes/meta.yml | 78 +++++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/freebayes/main.nf | 35 ++++++++++++++ tests/modules/freebayes/test.yml | 15 ++++++ 6 files changed, 289 insertions(+) create mode 100644 modules/freebayes/functions.nf create mode 100644 modules/freebayes/main.nf create mode 100644 modules/freebayes/meta.yml create mode 100644 tests/modules/freebayes/main.nf create mode 100644 tests/modules/freebayes/test.yml diff --git a/modules/freebayes/functions.nf b/modules/freebayes/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/freebayes/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/freebayes/main.nf b/modules/freebayes/main.nf new file mode 100644 index 00000000..63235d8a --- /dev/null +++ b/modules/freebayes/main.nf @@ -0,0 +1,79 @@ +// Import generic module functions +include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FREEBAYES { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::freebayes=1.3.5" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3" + } else { + container "quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3" + } + + input: + tuple val(meta), path(bam), path(bai) + tuple path(fasta), path(fai) + path(targets) + path(samples) + path(populations) + path(cnv) + + + output: + tuple val(meta), path("*.vcf.gz") , emit: vcf + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def targets_file = targets ? "--target ${targets}" : "" + def samples_file = samples ? "--samples ${samples}" : "" + def populations_file = populations ? "--populations ${populations}" : "" + def cnv_file = cnv ? 
"--cnv-map ${cnv}" : "" + if (task.cpus > 1) { + """ + freebayes-parallel \\ + <(fasta_generate_regions.py ${fasta}.fai 10000) ${task.cpus} \\ + -f $fasta \\ + $targets_file \\ + $samples_file \\ + $populations_file \\ + $cnv_file \\ + $options.args \\ + $bam > ${prefix}.vcf + + gzip --no-name ${prefix}.vcf + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + END_VERSIONS + """ + + } else { + """ + freebayes \\ + -f $fasta \\ + $targets_file \\ + $samples_file \\ + $populations_file \\ + $cnv_file \\ + $options.args \\ + $bam > ${prefix}.vcf + + gzip --no-name ${prefix}.vcf + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + END_VERSIONS + """ + } +} diff --git a/modules/freebayes/meta.yml b/modules/freebayes/meta.yml new file mode 100644 index 00000000..46eb5309 --- /dev/null +++ b/modules/freebayes/meta.yml @@ -0,0 +1,78 @@ +name: freebayes +description: A haplotype-based variant detector +keywords: + - variant caller + - SNP + - genotyping + - variant calling + - bayesian +tools: + - freebayes: + description: Bayesian haplotype-based polymorphism discovery and genotyping + homepage: https://github.com/freebayes/freebayes + documentation: https://github.com/freebayes/freebayes + tool_dev_url: https://github.com/freebayes/freebayes + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - bai: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.bam.bai" + - fasta: + type: file + description: reference fasta file + pattern: ".{fa,fa.gz,fasta,fasta.gz}" + - fai: + type: file + description: reference fasta file index + pattern: "*.fai" + - targets: + type: file + description: Optional - Limit analysis to targets listed in this BED-format FILE. + pattern: "*.bed" + - samples: + type: file + description: Optional - Limit analysis to samples listed (one per line) in the FILE. + pattern: "*.txt" + - populations: + type: file + description: Optional - Each line of FILE should list a sample and a population which it is part of. + pattern: "*.txt" + - cnv: + type: file + description: | + A copy number map BED file, which has + either a sample-level ploidy: + sample_name copy_number + or a region-specific format: + seq_name start end sample_name copy_number + pattern: "*.bed" + + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - vcf: + type: file + description: Compressed VCF file + pattern: "*.vcf.gz" +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index ed727e7c..8f030bd8 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -382,6 +382,10 @@ flash: - modules/flash/** - tests/modules/flash/** +freebayes: + - modules/freebayes/** + - tests/modules/freebayes/** + gatk4/applybqsr: - modules/gatk4/applybqsr/** - tests/modules/gatk4/applybqsr/** diff --git a/tests/modules/freebayes/main.nf b/tests/modules/freebayes/main.nf new file mode 100644 index 00000000..1c07b821 --- /dev/null +++ b/tests/modules/freebayes/main.nf @@ -0,0 +1,35 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FREEBAYES } from '../../../modules/freebayes/main.nf' addParams( options: [:] ) + +workflow test_freebayes { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] + reference = [file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)] + targets = [] + samples = [] + populations = [] + cnv = [] + + FREEBAYES ( input, reference, targets, samples, populations, cnv) +} + +workflow test_freebayes_bed { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] + reference = [file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)] + targets = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + samples = [] + populations = [] + cnv = [] + + FREEBAYES ( input, reference, targets, samples, populations, cnv) +} diff --git a/tests/modules/freebayes/test.yml b/tests/modules/freebayes/test.yml new file mode 100644 index 00000000..fb5af1ea --- /dev/null +++ b/tests/modules/freebayes/test.yml @@ -0,0 +1,15 @@ +- name: freebayes test_freebayes + command: nextflow run tests/modules/freebayes -entry test_freebayes -c tests/config/nextflow.config + tags: + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: e8de5fe0025e331b939c2a849290f325 + +- name: freebayes test_freebayes_bed + command: nextflow run tests/modules/freebayes -entry test_freebayes_bed -c tests/config/nextflow.config + tags: + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: 1c41cbec0cfa15002ce91b869ce9d519 From b5fa91d0f75831ec9871b7a181b61e2c12dcaa68 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Thu, 21 Oct 2021 11:32:05 +0200 Subject: [PATCH 151/314] Check only for file being generated (#879) --- tests/modules/freebayes/test.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/modules/freebayes/test.yml b/tests/modules/freebayes/test.yml index fb5af1ea..9ca54021 100644 --- a/tests/modules/freebayes/test.yml +++ b/tests/modules/freebayes/test.yml @@ -4,7 +4,6 @@ - freebayes files: - path: 
output/freebayes/test.vcf.gz - md5sum: e8de5fe0025e331b939c2a849290f325 - name: freebayes test_freebayes_bed command: nextflow run tests/modules/freebayes -entry test_freebayes_bed -c tests/config/nextflow.config @@ -12,4 +11,4 @@ - freebayes files: - path: output/freebayes/test.vcf.gz - md5sum: 1c41cbec0cfa15002ce91b869ce9d519 + From 4d89d6b2f0c478ac7ef80ece5cad44fdf7ffd614 Mon Sep 17 00:00:00 2001 From: Chris Cheshire Date: Thu, 21 Oct 2021 12:28:59 +0100 Subject: [PATCH 152/314] I accidently deleted a branch before the PR merged for PR 800 (#881) * hifiasm copied from fastqc * hifiasm tests init from fastqc * meta.yml init; test.yml and main.nf for printing version * Add hifiasm version printing * Removed spaced on an empty line * Reverted hifiasm from main * Updated seacr callpeak to include a control threshold * Whitespace Co-authored-by: Sviatoslav Sidorov Co-authored-by: Svyatoslav Sidorov --- modules/seacr/callpeak/main.nf | 13 +++++++------ modules/seacr/callpeak/meta.yml | 5 +++++ tests/config/test_data.config | 3 +++ tests/modules/seacr/callpeak/main.nf | 20 ++++++++++++++------ tests/modules/seacr/callpeak/test.yml | 11 ++++++++++- 5 files changed, 39 insertions(+), 13 deletions(-) diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index 4c3fd922..97bf1c0b 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -13,29 +13,30 @@ process SEACR_CALLPEAK { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::seacr=1.3 conda-forge::r-base=4.0.2 bioconda::bedtools=2.29.2" : null) + conda (params.enable_conda ? "bioconda::seacr=1.3 conda-forge::r-base=4.0.2 bioconda::bedtools=2.30.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:5bb5ed4307a8187a7f34730b00431de93688fa59-0" + container "https://depot.galaxyproject.org/singularity/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0" } else { - container 'quay.io/biocontainers/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:5bb5ed4307a8187a7f34730b00431de93688fa59-0' + container 'quay.io/biocontainers/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' } input: tuple val(meta), path(bedgraph), path(ctrlbedgraph) + val (threshold) output: tuple val(meta), path("*.bed"), emit: bed path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def function_switch = ctrlbedgraph ? 
"$ctrlbedgraph" : "$threshold" """ SEACR_1.3.sh \\ $bedgraph \\ - $ctrlbedgraph \\ + $function_switch \\ $options.args \\ $prefix - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo $VERSION) diff --git a/modules/seacr/callpeak/meta.yml b/modules/seacr/callpeak/meta.yml index 43044c2f..22db567d 100644 --- a/modules/seacr/callpeak/meta.yml +++ b/modules/seacr/callpeak/meta.yml @@ -1,3 +1,4 @@ + name: seacr_callpeak description: Call peaks using SEACR on sequenced reads in bedgraph format keywords: @@ -30,6 +31,10 @@ input: type: file description: | Control (IgG) data bedgraph file to generate an empirical threshold for peak calling. + - threshold: + type: value + description: | + Threshold value used to call peaks if the ctrlbedgraph input is set to []. Set to 1 if using a control bedgraph output: - meta: type: map diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 6abfa4f8..744282cc 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -200,6 +200,9 @@ params { test_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test.yak" test2_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test2.yak" + + cutandrun_bedgraph_test_1 = "${test_data_dir}/genomics/homo_sapiens/illumina/bedgraph/cutandtag_h3k27me3_test_1.bedGraph" + cutandrun_bedgraph_test_2 = "${test_data_dir}/genomics/homo_sapiens/illumina/bedgraph/cutandtag_igg_test_1.bedGraph" } 'pacbio' { primers = "${test_data_dir}/genomics/homo_sapiens/pacbio/fasta/primers.fasta" diff --git a/tests/modules/seacr/callpeak/main.nf b/tests/modules/seacr/callpeak/main.nf index 7e9cef8a..a1aeb76e 100644 --- a/tests/modules/seacr/callpeak/main.nf +++ b/tests/modules/seacr/callpeak/main.nf @@ -5,11 +5,19 @@ nextflow.enable.dsl = 2 include { SEACR_CALLPEAK } from '../../../../modules/seacr/callpeak/main.nf' addParams( options: [ args:'norm stringent' ] ) workflow test_seacr_callpeak { - input = [ - [ id:'test_1'], - file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/bedgraph/K27me3_1_to_chr20.bedgraph", checkIfExists: true), - file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/bedgraph/IgG_1_to_chr20.bedgraph", checkIfExists: true) - ] + input = [ [ id:'test_1'], + file(params.test_data['homo_sapiens']['illumina']['cutandrun_bedgraph_test_1'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['cutandrun_bedgraph_test_2'], checkIfExists: true) + ] - SEACR_CALLPEAK ( input ) + SEACR_CALLPEAK ( input, 0.05 ) } + +workflow test_seacr_callpeak_threshold { + input = [ [ id:'test_1'], + file(params.test_data['homo_sapiens']['illumina']['cutandrun_bedgraph_test_1'], checkIfExists: true), + [] + ] + + SEACR_CALLPEAK ( input, 0.05 ) +} \ No newline at end of file diff --git a/tests/modules/seacr/callpeak/test.yml b/tests/modules/seacr/callpeak/test.yml index 4b9790a4..2cf75b06 100644 --- a/tests/modules/seacr/callpeak/test.yml +++ b/tests/modules/seacr/callpeak/test.yml @@ -5,4 +5,13 @@ - seacr/callpeak files: - path: output/seacr/test_1.stringent.bed - md5sum: 3ac70475669eb6a7b8ca89e19a08a28e \ No newline at end of file + md5sum: a3cb0c7c4ffa895788da3f0d6371b7df + +- name: seacr callpeak threshold + command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak_threshold -c tests/config/nextflow.config + tags: + - seacr + - seacr/callpeak + files: + - path: output/seacr/test_1.stringent.bed + md5sum: 
1d23015c7087f7b48cc3139d53fd3463 \ No newline at end of file From 4ed5e4eff30922a1ba998441760d1f3537ca96b8 Mon Sep 17 00:00:00 2001 From: Benjamin Wingfield Date: Thu, 21 Oct 2021 17:04:15 +0100 Subject: [PATCH 153/314] New module: ucsc/liftover (#868) * add liftOver module * add liftover module tests * fix getProcessName * fix tests * fix out of date function * version numbers should be numeric * drop versions.yml from test.yml * Update modules/ucsc/liftover/main.nf Remove software name variable Co-authored-by: Jose Espinosa-Carrasco * Update tests/modules/ucsc/liftover/main.nf Use test chain file Co-authored-by: Jose Espinosa-Carrasco * add genome_chain_gz to test data config * update md5sum for new chain test data * Fix indentation in file declaration Co-authored-by: Jose Espinosa-Carrasco --- modules/ucsc/liftover/functions.nf | 78 ++++++++++++++++++++++++++++ modules/ucsc/liftover/main.nf | 48 +++++++++++++++++ modules/ucsc/liftover/meta.yml | 45 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 1 + tests/modules/ucsc/liftover/main.nf | 14 +++++ tests/modules/ucsc/liftover/test.yml | 10 ++++ 7 files changed, 200 insertions(+) create mode 100644 modules/ucsc/liftover/functions.nf create mode 100644 modules/ucsc/liftover/main.nf create mode 100644 modules/ucsc/liftover/meta.yml create mode 100644 tests/modules/ucsc/liftover/main.nf create mode 100644 tests/modules/ucsc/liftover/test.yml diff --git a/modules/ucsc/liftover/functions.nf b/modules/ucsc/liftover/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ucsc/liftover/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ucsc/liftover/main.nf b/modules/ucsc/liftover/main.nf new file mode 100644 index 00000000..3739a1e5 --- /dev/null +++ b/modules/ucsc/liftover/main.nf @@ -0,0 +1,48 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '377' + +process UCSC_LIFTOVER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::ucsc-liftover=377" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ucsc-liftover:377--h0b8a92a_3" + } else { + container "quay.io/biocontainers/ucsc-liftover:377--h0b8a92a_3" + } + + input: + tuple val(meta), path(bed) + path(chain) + + output: + tuple val(meta), path("*.lifted.bed") , emit: lifted + tuple val(meta), path("*.unlifted.bed"), emit: unlifted + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + liftOver \\ + $options.args \ + $bed \\ + $chain \\ + ${prefix}.lifted.bed \\ + ${prefix}.unlifted.bed + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo "$VERSION") + END_VERSIONS + """ +} diff --git a/modules/ucsc/liftover/meta.yml b/modules/ucsc/liftover/meta.yml new file mode 100644 index 00000000..5c2febdc --- /dev/null +++ b/modules/ucsc/liftover/meta.yml @@ -0,0 +1,45 @@ +name: ucsc_liftover +description: convert between genome builds +keywords: + - liftOver +tools: + - ucsc: + description: Move annotations from one assembly to another + homepage: http://hgdownload.cse.ucsc.edu/admin/exe/ + documentation: None + tool_dev_url: None + doi: "" + licence: ['varies; see http://genome.ucsc.edu/license'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bed: + type: file + description: Browser Extensible Data (BED) file + pattern: "*.{bed}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - lifted: + type: file + description: BED file containing successfully lifted variants + pattern: "*.{lifted.bed}" + - unlifted: + type: file + description: BED file containing variants that couldn't be lifted + pattern: "*.{unlifted.bed}" + +authors: + - "@nebfield" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8f030bd8..99eb271c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1075,6 +1075,10 @@ ucsc/bigwigaverageoverbed: - modules/ucsc/bigwigaverageoverbed/** - tests/modules/ucsc/bigwigaverageoverbed/** +ucsc/liftover: + - modules/ucsc/liftover/** + - tests/modules/ucsc/liftover/** + ucsc/wigtobigwig: - modules/ucsc/wigtobigwig/** - tests/modules/ucsc/wigtobigwig/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 744282cc..3c0308a0 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -109,6 +109,7 @@ params { genome_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz.tbi" transcriptome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/transcriptome.fasta" genome2_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome2.fasta" + genome_chain_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.chain.gz" dbsnp_146_hg38_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz" dbsnp_146_hg38_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz.tbi" diff --git a/tests/modules/ucsc/liftover/main.nf b/tests/modules/ucsc/liftover/main.nf new file mode 100644 index 00000000..9670759a --- /dev/null +++ b/tests/modules/ucsc/liftover/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UCSC_LIFTOVER } from '../../../../modules/ucsc/liftover/main.nf' addParams( options: [:] ) + +workflow test_ucsc_liftover { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)] + chain = file(params.test_data['homo_sapiens']['genome']['genome_chain_gz'], checkIfExists: true) + + UCSC_LIFTOVER ( input, chain ) +} diff --git a/tests/modules/ucsc/liftover/test.yml b/tests/modules/ucsc/liftover/test.yml new file mode 100644 index 00000000..74df6512 --- /dev/null +++ b/tests/modules/ucsc/liftover/test.yml @@ -0,0 +1,10 @@ +- name: ucsc liftover test_ucsc_liftover + command: nextflow run tests/modules/ucsc/liftover -entry test_ucsc_liftover -c tests/config/nextflow.config + tags: + - ucsc + - ucsc/liftover + files: + - path: output/ucsc/test.lifted.bed + md5sum: fd5878470257a8a0edeaa8b9374bd520 + - path: output/ucsc/test.unlifted.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e From 32f6191aca1e15e74f12a6371f648e6eba42e513 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Fri, 22 Oct 2021 12:22:02 +0200 Subject: [PATCH 154/314] New module: genrich (#877) * Add genrich module * Rearrange genrich module from genrich/genrich to genrich * Remove copy/paste code * Fix meta.yml * Implement save_duplicates independently of -r opt --- modules/genrich/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/genrich/main.nf | 69 +++++++++++++++++++++++++++++ modules/genrich/meta.yml | 71 ++++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 2 + tests/modules/genrich/main.nf | 44 
+++++++++++++++++++ tests/modules/genrich/test.yml | 39 +++++++++++++++++ 7 files changed, 307 insertions(+) create mode 100644 modules/genrich/functions.nf create mode 100644 modules/genrich/main.nf create mode 100644 modules/genrich/meta.yml create mode 100644 tests/modules/genrich/main.nf create mode 100644 tests/modules/genrich/test.yml diff --git a/modules/genrich/functions.nf b/modules/genrich/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/genrich/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/genrich/main.nf b/modules/genrich/main.nf new file mode 100644 index 00000000..c947e9cf --- /dev/null +++ b/modules/genrich/main.nf @@ -0,0 +1,69 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GENRICH { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::genrich=0.6.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/genrich:0.6.1--h5bf99c6_1" + } else { + container "quay.io/biocontainers/genrich:0.6.1--h5bf99c6_1" + } + + input: + tuple val(meta), path(treatment_bam) + path control_bam + path blacklist_bed + + output: + tuple val(meta), path("*narrowPeak") , emit: peaks + tuple val(meta), path("*pvalues.bedGraph"), optional:true, emit: bedgraph_pvalues + tuple val(meta), path("*pileup.bedGraph") , optional:true, emit: bedgraph_pileup + tuple val(meta), path("*intervals.bed") , optional:true, emit: bed_intervals + tuple val(meta), path("*duplicates.txt") , optional:true, emit: duplicates + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def control = params.control_bam ? "-c $control_bam" : '' + def pvalues = params.pvalues ? "-f ${prefix}.pvalues.bedGraph" : "" + def pileup = params.pileup ? "-k ${prefix}.pileup.bedGraph" : "" + def bed = params.bed ? "-b ${prefix}.intervals.bed" : "" + def blacklist = params.blacklist_bed ? "-E $blacklist_bed" : "" + def duplicates = "" + if (params.save_duplicates) { + if (options.args.contains('-r')) { + duplicates = "-R ${prefix}.duplicates.txt" + } else { + log.info '[Genrich] Duplicates can only be saved if they are filtered, defaulting to -r option (Remove PCR duplicates).' + duplicates = "-r -R ${prefix}.duplicates.txt" + } + } + """ + Genrich \\ + -t $treatment_bam \\ + $options.args \\ + $control \\ + $blacklist \\ + -o ${prefix}.narrowPeak \\ + $pvalues \\ + $pileup \\ + $bed \\ + $duplicates \\ + $blacklist \\ + $control + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(Genrich --version 2>&1) | sed 's/^Genrich, version //; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/genrich/meta.yml b/modules/genrich/meta.yml new file mode 100644 index 00000000..8f7b004b --- /dev/null +++ b/modules/genrich/meta.yml @@ -0,0 +1,71 @@ +name: genrich +description: Peak-calling for ChIP-seq and ATAC-seq enrichment experiments +keywords: + - peak-calling + - ChIP-seq + - ATAC-seq +tools: + - genrich: + description: | + Genrich is a peak-caller for genomic enrichment assays (e.g. ChIP-seq, ATAC-seq). 
+ It analyzes alignment files generated following the assay and produces a file + detailing peaks of significant enrichment. + homepage: https://github.com/jsh58/Genrich + documentation: https://github.com/jsh58/Genrich#readme + tool_dev_url: https://github.com/jsh58/Genrich + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - treatment_bam: + type: file + description: Coordinate sorted BAM/SAM file from treatment sample + pattern: "*.{bam,sam}" + - control_bam: + type: file + description: Coordinate sorted BAM/SAM file from control sample + pattern: "*.{bam,sam}" + - blacklist_bed: + type: file + description: Bed file containing genomic intervals to exclude from the analysis + pattern: "*.{bed}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - peaks: + type: file + description: Output file is in ENCODE narrowPeak format + pattern: "*.{narrowPeak}" + - bedgraph_pvalues: + type: file + description: bedGraph file containing p/q values + pattern: "*.{pvalues.bedGraph}" + - bedgraph_pileup: + type: file + description: bedGraph file containing pileups and p-values + pattern: "*.{pileup.bedGraph}" + - bed_intervals: + type: file + description: Bed file containing annotated intervals + pattern: "*.{intervals.bed}" + - duplicates: + type: file + description: Text output file containing intervals corresponding to PCR duplicates + pattern: "*.{intervals.txt}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + +authors: + - "@JoseEspinosa" + diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 99eb271c..7093790b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -466,6 +466,10 @@ genmap/mappability: - modules/genmap/mappability/** - tests/modules/genmap/mappability/** +genrich: + - modules/genrich/** + - tests/modules/genrich/** + gffread: - modules/gffread/** - tests/modules/gffread/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 3c0308a0..1abae34d 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -123,6 +123,7 @@ params { 'illumina' { test_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam" test_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam.bai" + test_paired_end_name_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.name.sorted.bam" test_paired_end_markduplicates_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.markduplicates.sorted.bam" test_paired_end_markduplicates_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.markduplicates.sorted.bam.bai" test_paired_end_markduplicates_sorted_referencesn_txt = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.markduplicates.sorted.referencesn.txt" @@ -137,6 +138,7 @@ params { test2_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam" test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam.bai" + test2_paired_end_name_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.name.sorted.bam" 
test2_paired_end_markduplicates_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.markduplicates.sorted.bam" test2_paired_end_markduplicates_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.markduplicates.sorted.bam.bai" test2_paired_end_recalibrated_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.recalibrated.sorted.bam" diff --git a/tests/modules/genrich/main.nf b/tests/modules/genrich/main.nf new file mode 100644 index 00000000..654b38e5 --- /dev/null +++ b/tests/modules/genrich/main.nf @@ -0,0 +1,44 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GENRICH } from '../../../modules/genrich/main.nf' addParams( control_bam: false, pvalues: false, pileup:false, bed:false, blacklist_bed:false, save_duplicates:false, options: ["args": "-p 0.1"] ) +include { GENRICH as GENRICH_BLACKLIST } from '../../../modules/genrich/main.nf' addParams( control_bam: false, pvalues: false, pileup:false, bed:false, blacklist_bed:true, save_duplicates:false, options: ["args": "-p 0.1"] ) +include { GENRICH as GENRICH_ALL_OUTPUTS } from '../../../modules/genrich/main.nf' addParams( control_bam: false, pvalues: true, pileup:true, bed:true, blacklist_bed:false, save_duplicates:true, options: ["args": "-r -p 0.1"] ) +include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' addParams( control_bam: false, pvalues: false, pileup:false, bed:false, blacklist_bed:false, save_duplicates:false, options: ["args": "-j -p 0.1"] ) + +workflow test_genrich { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ]] + control = [ ] + blacklist = [ ] + + GENRICH ( input, control, blacklist ) +} + +workflow test_genrich_ctrl { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ]] + control = [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ] + blacklist = [ ] + + GENRICH ( input, control, blacklist ) +} + +workflow test_genrich_all_outputs { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ]] + control = [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ] + blacklist = [ ] + + GENRICH_ALL_OUTPUTS ( input, control, blacklist ) +} + +workflow test_genrich_atacseq { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ]] + control = [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ] + blacklist = [ ] + + GENRICH_ATACSEQ ( input, control, blacklist ) +} diff --git a/tests/modules/genrich/test.yml b/tests/modules/genrich/test.yml new file mode 100644 index 00000000..bd762f7c --- /dev/null +++ b/tests/modules/genrich/test.yml @@ -0,0 +1,39 @@ +- name: genrich test_genrich + command: nextflow run tests/modules/genrich -entry test_genrich -c tests/config/nextflow.config + tags: + - genrich + files: + - path: output/genrich/test.narrowPeak + md5sum: 6afabdd3f691c7c84c66ff8a23984681 + +- name: genrich test_genrich_ctrl + command: nextflow run 
tests/modules/genrich -entry test_genrich_ctrl -c tests/config/nextflow.config + tags: + - genrich + files: + - path: output/genrich/test.narrowPeak + md5sum: 6afabdd3f691c7c84c66ff8a23984681 + +- name: genrich test_genrich_all_outputs + command: nextflow run tests/modules/genrich -entry test_genrich_all_outputs -c tests/config/nextflow.config + tags: + - genrich + files: + - path: output/genrich/test.duplicates.txt + md5sum: a92893f905fd8b3751bc6a960fbfe7ba + - path: output/genrich/test.intervals.bed + md5sum: 52edf47e6641c0cc03f9cca7324f7eaa + - path: output/genrich/test.narrowPeak + md5sum: e45eb7d000387975050c2e85c164e5be + - path: output/genrich/test.pileup.bedGraph + md5sum: e4f7fa664cd4ed2cf3a1a3a9eb415e71 + - path: output/genrich/test.pvalues.bedGraph + md5sum: 564859953704983393d4b7d6317060cd + +- name: genrich test_genrich_atacseq + command: nextflow run tests/modules/genrich -entry test_genrich_atacseq -c tests/config/nextflow.config + tags: + - genrich + files: + - path: output/genrich/test.narrowPeak + md5sum: ddea556b820f8be3695ffdf6c6f70aff From 3aacd46da2b221ed47aaa05c413a828538d2c2ae Mon Sep 17 00:00:00 2001 From: Kevin Date: Fri, 22 Oct 2021 15:39:54 -0700 Subject: [PATCH 155/314] Backfill software licenses meta (#876) * backfilled modules with meta.yml that had no license identifier * harmonized BSD license names * whitespace linting at modules/unzip/meta.yml:12 * harmonized software from US NIH-NCBI/NIST to 'US-Government-Work' * Update modules/bcftools/index/meta.yml `bcftools` is dual-licensed, use associative array to allow for multiple licenses Co-authored-by: Michael L Heuer Co-authored-by: Michael L Heuer --- modules/bandage/image/meta.yml | 1 + modules/bcftools/concat/meta.yml | 1 + modules/bcftools/consensus/meta.yml | 1 + modules/bcftools/filter/meta.yml | 1 + modules/bcftools/index/meta.yml | 2 +- modules/bcftools/isec/meta.yml | 1 + modules/bcftools/merge/meta.yml | 1 + modules/bcftools/mpileup/meta.yml | 1 + modules/bcftools/norm/meta.yml | 1 + modules/bcftools/query/meta.yml | 1 + modules/bcftools/reheader/meta.yml | 2 +- modules/bcftools/stats/meta.yml | 1 + modules/bcftools/view/meta.yml | 1 + modules/bedtools/bamtobed/meta.yml | 1 + modules/bedtools/complement/meta.yml | 1 + modules/bedtools/genomecov/meta.yml | 1 + modules/bedtools/getfasta/meta.yml | 1 + modules/bedtools/intersect/meta.yml | 1 + modules/bedtools/makewindows/meta.yml | 2 +- modules/bedtools/maskfasta/meta.yml | 1 + modules/bedtools/merge/meta.yml | 1 + modules/bedtools/slop/meta.yml | 1 + modules/bedtools/sort/meta.yml | 1 + modules/bedtools/subtract/meta.yml | 1 + modules/bismark/align/meta.yml | 1 + modules/bismark/deduplicate/meta.yml | 1 + modules/bismark/genomepreparation/meta.yml | 1 + modules/bismark/methylationextractor/meta.yml | 1 + modules/bismark/report/meta.yml | 1 + modules/bismark/summary/meta.yml | 1 + modules/blast/blastn/meta.yml | 1 + modules/blast/makeblastdb/meta.yml | 1 + modules/bowtie/align/meta.yml | 1 + modules/bowtie/build/meta.yml | 1 + modules/bowtie2/align/meta.yml | 1 + modules/bowtie2/build/meta.yml | 1 + modules/bwa/aln/meta.yml | 2 +- modules/bwa/index/meta.yml | 1 + modules/bwa/mem/meta.yml | 1 + modules/bwa/sampe/meta.yml | 2 +- modules/bwa/samse/meta.yml | 2 +- modules/bwamem2/index/meta.yml | 1 + modules/bwamem2/mem/meta.yml | 1 + modules/bwameth/align/meta.yml | 1 + modules/bwameth/index/meta.yml | 1 + modules/cat/cat/meta.yml | 2 +- modules/cat/fastq/meta.yml | 1 + modules/cnvkit/meta.yml | 1 + modules/cooler/digest/meta.yml | 2 +- 
modules/cooler/dump/meta.yml | 2 +- modules/custom/dumpsoftwareversions/meta.yml | 2 +- modules/cutadapt/meta.yml | 1 + modules/delly/call/meta.yml | 2 +- modules/dshbio/exportsegments/meta.yml | 1 + modules/dshbio/filterbed/meta.yml | 1 + modules/dshbio/filtergff3/meta.yml | 1 + modules/dshbio/splitbed/meta.yml | 1 + modules/dshbio/splitgff3/meta.yml | 1 + modules/ensemblvep/meta.yml | 1 + modules/expansionhunter/meta.yml | 2 +- modules/fastp/meta.yml | 1 + modules/fastqc/meta.yml | 1 + modules/gatk4/applybqsr/meta.yml | 1 + modules/gatk4/baserecalibrator/meta.yml | 1 + modules/gatk4/bedtointervallist/meta.yml | 1 + modules/gatk4/calculatecontamination/meta.yml | 1 + modules/gatk4/createsequencedictionary/meta.yml | 2 ++ modules/gatk4/fastqtosam/meta.yml | 4 ++-- modules/gatk4/getpileupsummaries/meta.yml | 1 + modules/gatk4/haplotypecaller/meta.yml | 1 + modules/gatk4/intervallisttools/meta.yml | 1 + modules/gatk4/learnreadorientationmodel/meta.yml | 1 + modules/gatk4/markduplicates/meta.yml | 2 +- modules/gatk4/mergebamalignment/meta.yml | 1 + modules/gatk4/mergevcfs/meta.yml | 1 + modules/gatk4/mutect2/meta.yml | 1 + modules/gatk4/revertsam/meta.yml | 1 + modules/gatk4/samtofastq/meta.yml | 1 + modules/gatk4/splitncigarreads/meta.yml | 1 + modules/gatk4/variantfiltration/meta.yml | 1 + modules/genmap/index/meta.yml | 2 +- modules/genmap/mappability/meta.yml | 2 +- modules/glnexus/meta.yml | 2 +- modules/graphmap2/align/meta.yml | 1 + modules/graphmap2/index/meta.yml | 1 + modules/gubbins/meta.yml | 1 + modules/gunzip/meta.yml | 1 + modules/hmmer/hmmalign/meta.yml | 2 +- modules/homer/annotatepeaks/meta.yml | 1 + modules/homer/findpeaks/meta.yml | 1 + modules/homer/maketagdirectory/meta.yml | 1 + modules/homer/makeucscfile/meta.yml | 1 + modules/ismapper/meta.yml | 2 +- modules/isoseq3/cluster/meta.yml | 2 +- modules/isoseq3/refine/meta.yml | 2 +- modules/ivar/consensus/meta.yml | 1 + modules/ivar/trim/meta.yml | 1 + modules/ivar/variants/meta.yml | 1 + modules/kallisto/index/meta.yml | 2 +- modules/kraken2/kraken2/meta.yml | 1 + modules/lima/meta.yml | 2 +- modules/methyldackel/extract/meta.yml | 1 + modules/methyldackel/mbias/meta.yml | 1 + modules/minia/meta.yml | 1 + modules/minimap2/align/meta.yml | 1 + modules/minimap2/index/meta.yml | 1 + modules/mosdepth/meta.yml | 1 + modules/multiqc/meta.yml | 1 + modules/nanolyse/meta.yml | 1 + modules/nanoplot/meta.yml | 1 + modules/optitype/meta.yml | 2 +- modules/pangolin/meta.yml | 1 + modules/pbbam/pbmerge/meta.yml | 2 +- modules/pbccs/meta.yml | 2 +- modules/picard/collectmultiplemetrics/meta.yml | 1 + modules/picard/collectwgsmetrics/meta.yml | 1 + modules/picard/markduplicates/meta.yml | 1 + modules/picard/mergesamfiles/meta.yml | 1 + modules/picard/sortsam/meta.yml | 1 + modules/qcat/meta.yml | 1 + modules/qualimap/bamqc/meta.yml | 1 + modules/quast/meta.yml | 3 ++- modules/rsem/calculateexpression/meta.yml | 1 + modules/rsem/preparereference/meta.yml | 1 + modules/rseqc/bamstat/meta.yml | 1 + modules/rseqc/inferexperiment/meta.yml | 1 + modules/rseqc/innerdistance/meta.yml | 1 + modules/rseqc/junctionannotation/meta.yml | 1 + modules/rseqc/junctionsaturation/meta.yml | 1 + modules/rseqc/readdistribution/meta.yml | 1 + modules/rseqc/readduplication/meta.yml | 1 + modules/salmon/index/meta.yml | 1 + modules/salmon/quant/meta.yml | 1 + modules/samtools/ampliconclip/meta.yml | 1 + modules/samtools/faidx/meta.yml | 1 + modules/samtools/fastq/meta.yml | 1 + modules/samtools/flagstat/meta.yml | 1 + modules/samtools/idxstats/meta.yml | 1 + 
modules/samtools/index/meta.yml | 1 + modules/samtools/merge/meta.yml | 1 + modules/samtools/mpileup/meta.yml | 1 + modules/samtools/sort/meta.yml | 1 + modules/samtools/stats/meta.yml | 1 + modules/samtools/view/meta.yml | 1 + modules/seacr/callpeak/meta.yml | 1 + modules/seqkit/split2/meta.yml | 1 + modules/sequenzautils/bam2seqz/meta.yml | 2 +- modules/sequenzautils/gcwiggle/meta.yml | 1 + modules/seqwish/induce/meta.yml | 1 + modules/snpeff/meta.yml | 1 + modules/snpsites/meta.yml | 1 + modules/sratools/fasterqdump/meta.yml | 2 +- modules/sratools/prefetch/meta.yml | 2 +- modules/star/align/meta.yml | 1 + modules/star/genomegenerate/meta.yml | 1 + modules/stringtie/merge/meta.yml | 1 + modules/stringtie/stringtie/meta.yml | 1 + modules/tabix/bgzip/meta.yml | 1 + modules/tabix/bgziptabix/meta.yml | 1 + modules/tabix/tabix/meta.yml | 1 + modules/tiddit/sv/meta.yml | 1 + modules/trimgalore/meta.yml | 1 + modules/untar/meta.yml | 1 + modules/unzip/meta.yml | 2 +- modules/variantbam/meta.yml | 2 +- 165 files changed, 168 insertions(+), 33 deletions(-) diff --git a/modules/bandage/image/meta.yml b/modules/bandage/image/meta.yml index 65f47664..1c2b9840 100644 --- a/modules/bandage/image/meta.yml +++ b/modules/bandage/image/meta.yml @@ -11,6 +11,7 @@ tools: Bandage - a Bioinformatics Application for Navigating De novo Assembly Graphs Easily homepage: https://github.com/rrwick/Bandage documentation: https://github.com/rrwick/Bandage + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/bcftools/concat/meta.yml b/modules/bcftools/concat/meta.yml index e394d18d..b2848595 100644 --- a/modules/bcftools/concat/meta.yml +++ b/modules/bcftools/concat/meta.yml @@ -13,6 +13,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/consensus/meta.yml b/modules/bcftools/consensus/meta.yml index 30f4910a..761115a6 100644 --- a/modules/bcftools/consensus/meta.yml +++ b/modules/bcftools/consensus/meta.yml @@ -11,6 +11,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/filter/meta.yml b/modules/bcftools/filter/meta.yml index 433b203d..72d28bf0 100644 --- a/modules/bcftools/filter/meta.yml +++ b/modules/bcftools/filter/meta.yml @@ -11,6 +11,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/index/meta.yml b/modules/bcftools/index/meta.yml index 6fc7df17..0d5dd3ef 100644 --- a/modules/bcftools/index/meta.yml +++ b/modules/bcftools/index/meta.yml @@ -13,7 +13,7 @@ tools: documentation: https://samtools.github.io/bcftools/howtos/index.html tool_dev_url: https://github.com/samtools/bcftools doi: "10.1093/gigascience/giab008" - licence: ['GPL'] + licence: ['MIT', 'GPL-3.0-or-later'] input: - meta: diff --git a/modules/bcftools/isec/meta.yml b/modules/bcftools/isec/meta.yml index 6a482257..d0be6dce 100644 --- a/modules/bcftools/isec/meta.yml +++ b/modules/bcftools/isec/meta.yml @@ -13,6 +13,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + 
licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/merge/meta.yml b/modules/bcftools/merge/meta.yml index 056ea37d..c7e3a280 100644 --- a/modules/bcftools/merge/meta.yml +++ b/modules/bcftools/merge/meta.yml @@ -11,6 +11,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/mpileup/meta.yml b/modules/bcftools/mpileup/meta.yml index 49f02a40..c31180ee 100644 --- a/modules/bcftools/mpileup/meta.yml +++ b/modules/bcftools/mpileup/meta.yml @@ -11,6 +11,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/norm/meta.yml b/modules/bcftools/norm/meta.yml index 760186dc..27978a53 100644 --- a/modules/bcftools/norm/meta.yml +++ b/modules/bcftools/norm/meta.yml @@ -12,6 +12,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/query/meta.yml b/modules/bcftools/query/meta.yml index 12b11216..e450f73e 100644 --- a/modules/bcftools/query/meta.yml +++ b/modules/bcftools/query/meta.yml @@ -12,6 +12,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/reheader/meta.yml b/modules/bcftools/reheader/meta.yml index 6d7c9f97..ee8cba32 100644 --- a/modules/bcftools/reheader/meta.yml +++ b/modules/bcftools/reheader/meta.yml @@ -11,7 +11,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://samtools.github.io/bcftools/bcftools.html#reheader doi: 10.1093/gigascience/giab008 - licence: ['GPL'] + licence: ['MIT'] input: - meta: diff --git a/modules/bcftools/stats/meta.yml b/modules/bcftools/stats/meta.yml index 78294ff7..505bf729 100644 --- a/modules/bcftools/stats/meta.yml +++ b/modules/bcftools/stats/meta.yml @@ -12,6 +12,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bcftools/view/meta.yml b/modules/bcftools/view/meta.yml index 638a4e4f..df5b0f8f 100644 --- a/modules/bcftools/view/meta.yml +++ b/modules/bcftools/view/meta.yml @@ -13,6 +13,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: http://www.htslib.org/doc/bcftools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/bamtobed/meta.yml b/modules/bedtools/bamtobed/meta.yml index 0eaf3e2a..e8c67047 100644 --- a/modules/bedtools/bamtobed/meta.yml +++ b/modules/bedtools/bamtobed/meta.yml @@ -8,6 +8,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. 
documentation: https://bedtools.readthedocs.io/en/latest/content/tools/complement.html + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/complement/meta.yml b/modules/bedtools/complement/meta.yml index 02ddca29..2ad8749c 100644 --- a/modules/bedtools/complement/meta.yml +++ b/modules/bedtools/complement/meta.yml @@ -8,6 +8,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. documentation: https://bedtools.readthedocs.io/en/latest/content/tools/complement.html + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/genomecov/meta.yml b/modules/bedtools/genomecov/meta.yml index bc49ab03..3deb4d6b 100644 --- a/modules/bedtools/genomecov/meta.yml +++ b/modules/bedtools/genomecov/meta.yml @@ -9,6 +9,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. documentation: https://bedtools.readthedocs.io/en/latest/content/tools/genomecov.html + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/getfasta/meta.yml b/modules/bedtools/getfasta/meta.yml index 89fbea54..38715c3d 100644 --- a/modules/bedtools/getfasta/meta.yml +++ b/modules/bedtools/getfasta/meta.yml @@ -9,6 +9,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. documentation: https://bedtools.readthedocs.io/en/latest/content/tools/intersect.html + licence: ['MIT'] input: - bed: type: file diff --git a/modules/bedtools/intersect/meta.yml b/modules/bedtools/intersect/meta.yml index a14bf515..3bcb6ece 100644 --- a/modules/bedtools/intersect/meta.yml +++ b/modules/bedtools/intersect/meta.yml @@ -8,6 +8,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. documentation: https://bedtools.readthedocs.io/en/latest/content/tools/intersect.html + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/makewindows/meta.yml b/modules/bedtools/makewindows/meta.yml index 7d86e127..a536d75f 100644 --- a/modules/bedtools/makewindows/meta.yml +++ b/modules/bedtools/makewindows/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://bedtools.readthedocs.io/en/latest/content/tools/makewindows.html tool_dev_url: None doi: "10.1093/bioinformatics/btq033" - licence: ['GPL v2'] + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/maskfasta/meta.yml b/modules/bedtools/maskfasta/meta.yml index 428d6f57..0b7aa3ed 100644 --- a/modules/bedtools/maskfasta/meta.yml +++ b/modules/bedtools/maskfasta/meta.yml @@ -9,6 +9,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. documentation: https://bedtools.readthedocs.io/en/latest/content/tools/intersect.html + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/merge/meta.yml b/modules/bedtools/merge/meta.yml index 39e79cbd..40a42b7b 100644 --- a/modules/bedtools/merge/meta.yml +++ b/modules/bedtools/merge/meta.yml @@ -8,6 +8,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. 
documentation: https://bedtools.readthedocs.io/en/latest/content/tools/merge.html + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/slop/meta.yml b/modules/bedtools/slop/meta.yml index 709d88c3..a4713936 100644 --- a/modules/bedtools/slop/meta.yml +++ b/modules/bedtools/slop/meta.yml @@ -8,6 +8,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. documentation: https://bedtools.readthedocs.io/en/latest/content/tools/slop.html + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/sort/meta.yml b/modules/bedtools/sort/meta.yml index a0332787..5b8b41d7 100644 --- a/modules/bedtools/sort/meta.yml +++ b/modules/bedtools/sort/meta.yml @@ -8,6 +8,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. documentation: https://bedtools.readthedocs.io/en/latest/content/tools/sort.html + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bedtools/subtract/meta.yml b/modules/bedtools/subtract/meta.yml index e13057bb..b9245a55 100644 --- a/modules/bedtools/subtract/meta.yml +++ b/modules/bedtools/subtract/meta.yml @@ -10,6 +10,7 @@ tools: description: | A set of tools for genomic analysis tasks, specifically enabling genome arithmetic (merge, count, complement) on various file types. documentation: https://bedtools.readthedocs.io/en/latest/content/tools/subtract.html + licence: ['MIT'] input: - meta: diff --git a/modules/bismark/align/meta.yml b/modules/bismark/align/meta.yml index 92a3b1ec..79948e1c 100644 --- a/modules/bismark/align/meta.yml +++ b/modules/bismark/align/meta.yml @@ -17,6 +17,7 @@ tools: homepage: https://github.com/FelixKrueger/Bismark documentation: https://github.com/FelixKrueger/Bismark/tree/master/Docs doi: 10.1093/bioinformatics/btr167 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/bismark/deduplicate/meta.yml b/modules/bismark/deduplicate/meta.yml index d19a915f..9e28cd22 100644 --- a/modules/bismark/deduplicate/meta.yml +++ b/modules/bismark/deduplicate/meta.yml @@ -19,6 +19,7 @@ tools: homepage: https://github.com/FelixKrueger/Bismark documentation: https://github.com/FelixKrueger/Bismark/tree/master/Docs doi: 10.1093/bioinformatics/btr167 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/bismark/genomepreparation/meta.yml b/modules/bismark/genomepreparation/meta.yml index 7712d7c2..2a17f1fb 100644 --- a/modules/bismark/genomepreparation/meta.yml +++ b/modules/bismark/genomepreparation/meta.yml @@ -19,6 +19,7 @@ tools: homepage: https://github.com/FelixKrueger/Bismark documentation: https://github.com/FelixKrueger/Bismark/tree/master/Docs doi: 10.1093/bioinformatics/btr167 + licence: ['GPL-3.0-or-later'] input: - fasta: type: file diff --git a/modules/bismark/methylationextractor/meta.yml b/modules/bismark/methylationextractor/meta.yml index 9fa0f4f4..602fc06d 100644 --- a/modules/bismark/methylationextractor/meta.yml +++ b/modules/bismark/methylationextractor/meta.yml @@ -18,6 +18,7 @@ tools: homepage: https://github.com/FelixKrueger/Bismark documentation: https://github.com/FelixKrueger/Bismark/tree/master/Docs doi: 10.1093/bioinformatics/btr167 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/bismark/report/meta.yml b/modules/bismark/report/meta.yml index 889d1227..e849e109 100644 --- a/modules/bismark/report/meta.yml +++ 
b/modules/bismark/report/meta.yml @@ -16,6 +16,7 @@ tools: homepage: https://github.com/FelixKrueger/Bismark documentation: https://github.com/FelixKrueger/Bismark/tree/master/Docs doi: 10.1093/bioinformatics/btr167 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/bismark/summary/meta.yml b/modules/bismark/summary/meta.yml index 10f71fe4..0494bb8e 100644 --- a/modules/bismark/summary/meta.yml +++ b/modules/bismark/summary/meta.yml @@ -19,6 +19,7 @@ tools: homepage: https://github.com/FelixKrueger/Bismark documentation: https://github.com/FelixKrueger/Bismark/tree/master/Docs doi: 10.1093/bioinformatics/btr167 + licence: ['GPL-3.0-or-later'] input: - bam: type: file diff --git a/modules/blast/blastn/meta.yml b/modules/blast/blastn/meta.yml index d19d3df6..39acb663 100644 --- a/modules/blast/blastn/meta.yml +++ b/modules/blast/blastn/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://blast.ncbi.nlm.nih.gov/Blast.cgi documentation: https://blast.ncbi.nlm.nih.gov/Blast.cgi?CMD=Web&PAGE_TYPE=Blastdocs doi: 10.1016/S0022-2836(05)80360-2 + licence: ['US-Government-Work'] input: - meta: type: map diff --git a/modules/blast/makeblastdb/meta.yml b/modules/blast/makeblastdb/meta.yml index 545cc2a0..c9d18cba 100644 --- a/modules/blast/makeblastdb/meta.yml +++ b/modules/blast/makeblastdb/meta.yml @@ -11,6 +11,7 @@ tools: homepage: https://blast.ncbi.nlm.nih.gov/Blast.cgi documentation: https://blast.ncbi.nlm.nih.gov/Blast.cgi?CMD=Web&PAGE_TYPE=Blastdocs doi: 10.1016/S0022-2836(05)80360-2 + licence: ['US-Government-Work'] input: - fasta: type: file diff --git a/modules/bowtie/align/meta.yml b/modules/bowtie/align/meta.yml index 73c65631..07d480be 100644 --- a/modules/bowtie/align/meta.yml +++ b/modules/bowtie/align/meta.yml @@ -13,6 +13,7 @@ tools: homepage: http://bowtie-bio.sourceforge.net/index.shtml documentation: http://bowtie-bio.sourceforge.net/manual.shtml arxiv: arXiv:1303.3997 + licence: ['Artistic-2.0'] input: - meta: type: map diff --git a/modules/bowtie/build/meta.yml b/modules/bowtie/build/meta.yml index aa39f32e..016adcfe 100644 --- a/modules/bowtie/build/meta.yml +++ b/modules/bowtie/build/meta.yml @@ -13,6 +13,7 @@ tools: homepage: http://bowtie-bio.sourceforge.net/index.shtml documentation: http://bowtie-bio.sourceforge.net/manual.shtml arxiv: arXiv:1303.3997 + licence: ['Artistic-2.0'] input: - fasta: type: file diff --git a/modules/bowtie2/align/meta.yml b/modules/bowtie2/align/meta.yml index f9d54d87..77c9e397 100644 --- a/modules/bowtie2/align/meta.yml +++ b/modules/bowtie2/align/meta.yml @@ -13,6 +13,7 @@ tools: homepage: http://bowtie-bio.sourceforge.net/bowtie2/index.shtml documentation: http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml doi: 10.1038/nmeth.1923 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/bowtie2/build/meta.yml b/modules/bowtie2/build/meta.yml index 4531d079..ecc54e9b 100644 --- a/modules/bowtie2/build/meta.yml +++ b/modules/bowtie2/build/meta.yml @@ -14,6 +14,7 @@ tools: homepage: http://bowtie-bio.sourceforge.net/bowtie2/index.shtml documentation: http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml doi: 10.1038/nmeth.1923 + licence: ['GPL-3.0-or-later'] input: - fasta: type: file diff --git a/modules/bwa/aln/meta.yml b/modules/bwa/aln/meta.yml index d4a2b19d..d2424a5f 100644 --- a/modules/bwa/aln/meta.yml +++ b/modules/bwa/aln/meta.yml @@ -17,7 +17,7 @@ tools: homepage: http://bio-bwa.sourceforge.net/ documentation: http://bio-bwa.sourceforge.net/ doi: "10.1093/bioinformatics/btp324" - 
licence: ['GPL v3'] + licence: ['GPL-3.0-or-later'] input: - meta: diff --git a/modules/bwa/index/meta.yml b/modules/bwa/index/meta.yml index c3c0a8d8..11d62df3 100644 --- a/modules/bwa/index/meta.yml +++ b/modules/bwa/index/meta.yml @@ -13,6 +13,7 @@ tools: homepage: http://bio-bwa.sourceforge.net/ documentation: http://www.htslib.org/doc/samtools.html arxiv: arXiv:1303.3997 + licence: ['GPL-3.0-or-later'] input: - fasta: type: file diff --git a/modules/bwa/mem/meta.yml b/modules/bwa/mem/meta.yml index 66238507..61eaddef 100644 --- a/modules/bwa/mem/meta.yml +++ b/modules/bwa/mem/meta.yml @@ -16,6 +16,7 @@ tools: homepage: http://bio-bwa.sourceforge.net/ documentation: http://www.htslib.org/doc/samtools.html arxiv: arXiv:1303.3997 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/bwa/sampe/meta.yml b/modules/bwa/sampe/meta.yml index ec2dfff5..7b530a03 100644 --- a/modules/bwa/sampe/meta.yml +++ b/modules/bwa/sampe/meta.yml @@ -18,7 +18,7 @@ tools: homepage: http://bio-bwa.sourceforge.net/ documentation: http://bio-bwa.sourceforge.net/ doi: "10.1093/bioinformatics/btp324" - licence: ['GPL v3'] + licence: ['GPL-3.0-or-later'] input: - meta: diff --git a/modules/bwa/samse/meta.yml b/modules/bwa/samse/meta.yml index 1e7ef335..9a9ecb39 100644 --- a/modules/bwa/samse/meta.yml +++ b/modules/bwa/samse/meta.yml @@ -19,7 +19,7 @@ tools: homepage: http://bio-bwa.sourceforge.net/ documentation: http://bio-bwa.sourceforge.net/ doi: "10.1093/bioinformatics/btp324" - licence: ['GPL v3'] + licence: ['GPL-3.0-or-later'] input: - meta: diff --git a/modules/bwamem2/index/meta.yml b/modules/bwamem2/index/meta.yml index 1b36be8d..e0f6014c 100644 --- a/modules/bwamem2/index/meta.yml +++ b/modules/bwamem2/index/meta.yml @@ -12,6 +12,7 @@ tools: a large reference genome, such as the human genome. 
homepage: https://github.com/bwa-mem2/bwa-mem2 documentation: https://github.com/bwa-mem2/bwa-mem2#usage + licence: ['MIT'] input: - fasta: type: file diff --git a/modules/bwamem2/mem/meta.yml b/modules/bwamem2/mem/meta.yml index 2fb4449e..58a35e08 100644 --- a/modules/bwamem2/mem/meta.yml +++ b/modules/bwamem2/mem/meta.yml @@ -16,6 +16,7 @@ tools: homepage: http://bio-bwa.sourceforge.net/ documentation: http://www.htslib.org/doc/samtools.html arxiv: arXiv:1303.3997 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bwameth/align/meta.yml b/modules/bwameth/align/meta.yml index 11fc9949..1cd66237 100644 --- a/modules/bwameth/align/meta.yml +++ b/modules/bwameth/align/meta.yml @@ -19,6 +19,7 @@ tools: homepage: https://github.com/brentp/bwa-meth documentation: https://github.com/brentp/bwa-meth arxiv: arXiv:1401.1129 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/bwameth/index/meta.yml b/modules/bwameth/index/meta.yml index c96fbfbb..352dfd0f 100644 --- a/modules/bwameth/index/meta.yml +++ b/modules/bwameth/index/meta.yml @@ -15,6 +15,7 @@ tools: homepage: https://github.com/brentp/bwa-meth documentation: https://github.com/brentp/bwa-meth arxiv: arXiv:1401.1129 + licence: ['MIT'] input: - fasta: type: file diff --git a/modules/cat/cat/meta.yml b/modules/cat/cat/meta.yml index f1a46ca3..b3f370ee 100644 --- a/modules/cat/cat/meta.yml +++ b/modules/cat/cat/meta.yml @@ -10,7 +10,7 @@ tools: homepage: None documentation: https://man7.org/linux/man-pages/man1/cat.1.html tool_dev_url: None - + licence: ['GPL-3.0-or-later'] input: - files_in: type: file diff --git a/modules/cat/fastq/meta.yml b/modules/cat/fastq/meta.yml index 6c6c397e..1992fa34 100644 --- a/modules/cat/fastq/meta.yml +++ b/modules/cat/fastq/meta.yml @@ -8,6 +8,7 @@ tools: description: | The cat utility reads files sequentially, writing them to the standard output. documentation: https://www.gnu.org/software/coreutils/manual/html_node/cat-invocation.html + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/cnvkit/meta.yml b/modules/cnvkit/meta.yml index 30c1b588..3e760d16 100755 --- a/modules/cnvkit/meta.yml +++ b/modules/cnvkit/meta.yml @@ -10,6 +10,7 @@ tools: CNVkit is a Python library and command-line software toolkit to infer and visualize copy number from high-throughput DNA sequencing data. It is designed for use with hybrid capture, including both whole-exome and custom target panels, and short-read sequencing platforms such as Illumina and Ion Torrent. 
homepage: https://cnvkit.readthedocs.io/en/stable/index.html documentation: https://cnvkit.readthedocs.io/en/stable/index.html + licence: ['Apache-2.0'] params: - outdir: type: string diff --git a/modules/cooler/digest/meta.yml b/modules/cooler/digest/meta.yml index 4fb85e4f..6ce95ad7 100644 --- a/modules/cooler/digest/meta.yml +++ b/modules/cooler/digest/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://cooler.readthedocs.io/en/latest/index.html tool_dev_url: https://github.com/open2c/cooler doi: "10.1093/bioinformatics/btz540" - licence: ['BSD-3-clause'] + licence: ['BSD-3-Clause'] input: - fasta: diff --git a/modules/cooler/dump/meta.yml b/modules/cooler/dump/meta.yml index 1d98a62e..659b06a1 100644 --- a/modules/cooler/dump/meta.yml +++ b/modules/cooler/dump/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://cooler.readthedocs.io/en/latest/index.html tool_dev_url: https://github.com/open2c/cooler doi: "10.1093/bioinformatics/btz540" - licence: ['BSD-3-clause'] + licence: ['BSD-3-Clause'] input: - meta: diff --git a/modules/custom/dumpsoftwareversions/meta.yml b/modules/custom/dumpsoftwareversions/meta.yml index 8d4a6ed4..c8310e35 100644 --- a/modules/custom/dumpsoftwareversions/meta.yml +++ b/modules/custom/dumpsoftwareversions/meta.yml @@ -8,7 +8,7 @@ tools: description: Custom module used to dump software versions within the nf-core pipeline template homepage: https://github.com/nf-core/tools documentation: https://github.com/nf-core/tools - + licence: ['MIT'] input: - versions: type: file diff --git a/modules/cutadapt/meta.yml b/modules/cutadapt/meta.yml index 62c2ccde..b4e6f6e7 100644 --- a/modules/cutadapt/meta.yml +++ b/modules/cutadapt/meta.yml @@ -11,6 +11,7 @@ tools: Cutadapt finds and removes adapter sequences, primers, poly-A tails and other types of unwanted sequence from your high-throughput sequencing reads. documentation: https://cutadapt.readthedocs.io/en/stable/index.html doi: DOI:10.14806/ej.17.1.200 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/delly/call/meta.yml b/modules/delly/call/meta.yml index 75e5c9c2..56539188 100644 --- a/modules/delly/call/meta.yml +++ b/modules/delly/call/meta.yml @@ -13,7 +13,7 @@ tools: documentation: https://github.com/dellytools/delly/blob/master/README.md tool_dev_url: None doi: "DOI:10.1093/bioinformatics/bts378" - licence: ["BSD-3-clause"] + licence: ['BSD-3-Clause'] input: - meta: diff --git a/modules/dshbio/exportsegments/meta.yml b/modules/dshbio/exportsegments/meta.yml index b9b145df..da5455c7 100644 --- a/modules/dshbio/exportsegments/meta.yml +++ b/modules/dshbio/exportsegments/meta.yml @@ -12,6 +12,7 @@ tools: or later. homepage: https://github.com/heuermh/dishevelled-bio documentation: https://github.com/heuermh/dishevelled-bio + licence: ['LGPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/dshbio/filterbed/meta.yml b/modules/dshbio/filterbed/meta.yml index 0e09b392..77054be4 100644 --- a/modules/dshbio/filterbed/meta.yml +++ b/modules/dshbio/filterbed/meta.yml @@ -10,6 +10,7 @@ tools: or later. homepage: https://github.com/heuermh/dishevelled-bio documentation: https://github.com/heuermh/dishevelled-bio + licence: ['LGPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/dshbio/filtergff3/meta.yml b/modules/dshbio/filtergff3/meta.yml index 2fd916fa..aa1bce43 100644 --- a/modules/dshbio/filtergff3/meta.yml +++ b/modules/dshbio/filtergff3/meta.yml @@ -10,6 +10,7 @@ tools: or later. 
homepage: https://github.com/heuermh/dishevelled-bio documentation: https://github.com/heuermh/dishevelled-bio + licence: ['LGPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/dshbio/splitbed/meta.yml b/modules/dshbio/splitbed/meta.yml index 16aec66b..a35ea25f 100644 --- a/modules/dshbio/splitbed/meta.yml +++ b/modules/dshbio/splitbed/meta.yml @@ -10,6 +10,7 @@ tools: or later. homepage: https://github.com/heuermh/dishevelled-bio documentation: https://github.com/heuermh/dishevelled-bio + licence: ['LGPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/dshbio/splitgff3/meta.yml b/modules/dshbio/splitgff3/meta.yml index 36e37862..fdbbe16a 100644 --- a/modules/dshbio/splitgff3/meta.yml +++ b/modules/dshbio/splitgff3/meta.yml @@ -10,6 +10,7 @@ tools: or later. homepage: https://github.com/heuermh/dishevelled-bio documentation: https://github.com/heuermh/dishevelled-bio + licence: ['LGPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/ensemblvep/meta.yml b/modules/ensemblvep/meta.yml index 9ec4f6a4..1b819227 100644 --- a/modules/ensemblvep/meta.yml +++ b/modules/ensemblvep/meta.yml @@ -9,6 +9,7 @@ tools: or structural variants) on genes, transcripts, and protein sequence, as well as regulatory regions. homepage: https://www.ensembl.org/info/docs/tools/vep/index.html documentation: https://www.ensembl.org/info/docs/tools/vep/script/index.html + licence: ['Apache-2.0'] params: - use_cache: type: boolean diff --git a/modules/expansionhunter/meta.yml b/modules/expansionhunter/meta.yml index 54bb3293..17d72bb4 100644 --- a/modules/expansionhunter/meta.yml +++ b/modules/expansionhunter/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/Illumina/ExpansionHunter/blob/master/docs/01_Introduction.md tool_dev_url: None doi: "10.1093/bioinformatics/btz431" - licence: ['Apache v2.0'] + licence: ['Apache-2.0'] input: - meta: diff --git a/modules/fastp/meta.yml b/modules/fastp/meta.yml index cfef4a99..6e133871 100644 --- a/modules/fastp/meta.yml +++ b/modules/fastp/meta.yml @@ -10,6 +10,7 @@ tools: A tool designed to provide fast all-in-one preprocessing for FastQ files. This tool is developed in C++ with multithreading supported to afford high performance. documentation: https://github.com/OpenGene/fastp doi: https://doi.org/10.1093/bioinformatics/bty560 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/fastqc/meta.yml b/modules/fastqc/meta.yml index 0ae08aee..b09553a3 100644 --- a/modules/fastqc/meta.yml +++ b/modules/fastqc/meta.yml @@ -15,6 +15,7 @@ tools: overrepresented sequences. 
homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ + licence: ['GPL-2.0-only'] input: - meta: type: map diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index be815bd8..e09e8c52 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: diff --git a/modules/gatk4/baserecalibrator/meta.yml b/modules/gatk4/baserecalibrator/meta.yml index 068f8ef1..d579d9e5 100644 --- a/modules/gatk4/baserecalibrator/meta.yml +++ b/modules/gatk4/baserecalibrator/meta.yml @@ -11,6 +11,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: diff --git a/modules/gatk4/bedtointervallist/meta.yml b/modules/gatk4/bedtointervallist/meta.yml index aacca1a6..910f9552 100644 --- a/modules/gatk4/bedtointervallist/meta.yml +++ b/modules/gatk4/bedtointervallist/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: type: map diff --git a/modules/gatk4/calculatecontamination/meta.yml b/modules/gatk4/calculatecontamination/meta.yml index 0d1b9b85..8c843732 100644 --- a/modules/gatk4/calculatecontamination/meta.yml +++ b/modules/gatk4/calculatecontamination/meta.yml @@ -16,6 +16,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: diff --git a/modules/gatk4/createsequencedictionary/meta.yml b/modules/gatk4/createsequencedictionary/meta.yml index 90f415a2..54f479b3 100644 --- a/modules/gatk4/createsequencedictionary/meta.yml +++ b/modules/gatk4/createsequencedictionary/meta.yml @@ -12,6 +12,8 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] + input: - fasta: type: file diff --git a/modules/gatk4/fastqtosam/meta.yml b/modules/gatk4/fastqtosam/meta.yml index ab56ec53..8bd9eed5 100644 --- a/modules/gatk4/fastqtosam/meta.yml +++ b/modules/gatk4/fastqtosam/meta.yml @@ -1,5 +1,5 @@ name: gatk4_fastqtosam -description: Converts FastQ file to BAM format +description: Converts FastQ file to SAM/BAM format keywords: - bam - fastq @@ -14,7 +14,7 @@ tools: documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s tool_dev_url: https://github.com/broadinstitute/gatk doi: "10.1158/1538-7445.AM2017-3590" - licence: ['BSD-3-clause'] + licence: ['MIT'] input: - meta: diff --git a/modules/gatk4/getpileupsummaries/meta.yml b/modules/gatk4/getpileupsummaries/meta.yml index bda0ccb1..70158a8d 100644 --- a/modules/gatk4/getpileupsummaries/meta.yml +++ b/modules/gatk4/getpileupsummaries/meta.yml @@ -15,6 +15,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: 
['Apache-2.0'] input: - meta: diff --git a/modules/gatk4/haplotypecaller/meta.yml b/modules/gatk4/haplotypecaller/meta.yml index 73adc950..6a1bd7ed 100644 --- a/modules/gatk4/haplotypecaller/meta.yml +++ b/modules/gatk4/haplotypecaller/meta.yml @@ -13,6 +13,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: diff --git a/modules/gatk4/intervallisttools/meta.yml b/modules/gatk4/intervallisttools/meta.yml index 14f7db35..9e2d994f 100644 --- a/modules/gatk4/intervallisttools/meta.yml +++ b/modules/gatk4/intervallisttools/meta.yml @@ -14,6 +14,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: diff --git a/modules/gatk4/learnreadorientationmodel/meta.yml b/modules/gatk4/learnreadorientationmodel/meta.yml index c15b48cb..4eff6939 100644 --- a/modules/gatk4/learnreadorientationmodel/meta.yml +++ b/modules/gatk4/learnreadorientationmodel/meta.yml @@ -15,6 +15,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: diff --git a/modules/gatk4/markduplicates/meta.yml b/modules/gatk4/markduplicates/meta.yml index bd5ed5e7..59aaad4d 100644 --- a/modules/gatk4/markduplicates/meta.yml +++ b/modules/gatk4/markduplicates/meta.yml @@ -13,7 +13,7 @@ tools: documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360037052812-MarkDuplicates-Picard- tool_dev_url: https://github.com/broadinstitute/gatk doi: 10.1158/1538-7445.AM2017-3590 - licence: ['BSD-3-clause'] + licence: ['MIT'] input: - meta: diff --git a/modules/gatk4/mergebamalignment/meta.yml b/modules/gatk4/mergebamalignment/meta.yml index 7823c458..c66c78db 100644 --- a/modules/gatk4/mergebamalignment/meta.yml +++ b/modules/gatk4/mergebamalignment/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: type: map diff --git a/modules/gatk4/mergevcfs/meta.yml b/modules/gatk4/mergevcfs/meta.yml index b20d7bb5..597f9ec6 100644 --- a/modules/gatk4/mergevcfs/meta.yml +++ b/modules/gatk4/mergevcfs/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: type: map diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 182b6712..4c38a049 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -14,6 +14,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: diff --git a/modules/gatk4/revertsam/meta.yml b/modules/gatk4/revertsam/meta.yml index 619450d3..b52dcb36 100644 --- a/modules/gatk4/revertsam/meta.yml +++ b/modules/gatk4/revertsam/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: 
https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: type: map diff --git a/modules/gatk4/samtofastq/meta.yml b/modules/gatk4/samtofastq/meta.yml index 20033ec2..de4624b5 100644 --- a/modules/gatk4/samtofastq/meta.yml +++ b/modules/gatk4/samtofastq/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: type: map diff --git a/modules/gatk4/splitncigarreads/meta.yml b/modules/gatk4/splitncigarreads/meta.yml index 9eefb545..f287ede4 100644 --- a/modules/gatk4/splitncigarreads/meta.yml +++ b/modules/gatk4/splitncigarreads/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: type: map diff --git a/modules/gatk4/variantfiltration/meta.yml b/modules/gatk4/variantfiltration/meta.yml index 4dbd71fe..6d4983a6 100644 --- a/modules/gatk4/variantfiltration/meta.yml +++ b/modules/gatk4/variantfiltration/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] input: - meta: type: map diff --git a/modules/genmap/index/meta.yml b/modules/genmap/index/meta.yml index adecf3c0..2ab0910d 100644 --- a/modules/genmap/index/meta.yml +++ b/modules/genmap/index/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://github.com/cpockrandt/genmap tool_dev_url: https://github.com/cpockrandt/genmap doi: "10.1093/bioinformatics/btaa222" - licence: ['BSD'] + licence: ['BSD-3-Clause'] input: - fasta: diff --git a/modules/genmap/mappability/meta.yml b/modules/genmap/mappability/meta.yml index c28cbd6d..d2835d92 100644 --- a/modules/genmap/mappability/meta.yml +++ b/modules/genmap/mappability/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://github.com/cpockrandt/genmap tool_dev_url: https://github.com/cpockrandt/genmap doi: "10.1093/bioinformatics/btaa222" - licence: ['BSD'] + licence: ['BSD-3-Clause'] input: - fasta: diff --git a/modules/glnexus/meta.yml b/modules/glnexus/meta.yml index aec25bb0..5ba17cae 100644 --- a/modules/glnexus/meta.yml +++ b/modules/glnexus/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/dnanexus-rnd/GLnexus/wiki/Getting-Started tool_dev_url: None doi: https://doi.org/10.1101/343970 - licence: ['Apache License 2.0'] + licence: ['Apache-2.0'] input: - meta: diff --git a/modules/graphmap2/align/meta.yml b/modules/graphmap2/align/meta.yml index a4acb648..9fb1507a 100644 --- a/modules/graphmap2/align/meta.yml +++ b/modules/graphmap2/align/meta.yml @@ -12,6 +12,7 @@ tools: A versatile pairwise aligner for genomic and spliced nucleotide sequences. homepage: https://github.com/lbcb-sci/graphmap2 documentation: https://github.com/lbcb-sci/graphmap2#graphmap2---a-highly-sensitive-and-accurate-mapper-for-long-error-prone-reads + licence: ['MIT'] input: - meta: type: map diff --git a/modules/graphmap2/index/meta.yml b/modules/graphmap2/index/meta.yml index e7bd6cb6..92a0a3d7 100644 --- a/modules/graphmap2/index/meta.yml +++ b/modules/graphmap2/index/meta.yml @@ -10,6 +10,7 @@ tools: A versatile pairwise aligner for genomic and spliced nucleotide sequences. 
homepage: https://github.com/lbcb-sci/graphmap2 documentation: https://github.com/lbcb-sci/graphmap2#graphmap2---a-highly-sensitive-and-accurate-mapper-for-long-error-prone-reads + licence: ['MIT'] input: - fasta: type: file diff --git a/modules/gubbins/meta.yml b/modules/gubbins/meta.yml index 84b930a2..f73e2bb0 100644 --- a/modules/gubbins/meta.yml +++ b/modules/gubbins/meta.yml @@ -2,6 +2,7 @@ name: gubbins description: Gubbins (Genealogies Unbiased By recomBinations In Nucleotide Sequences) is an algorithm that iteratively identifies loci containing elevated densities of base substitutions while concurrently constructing a phylogeny based on the putative point mutations outside of these regions. +licence: ['GPL-2.0-only'] keywords: - recombination - alignment diff --git a/modules/gunzip/meta.yml b/modules/gunzip/meta.yml index dbec5534..3482f0d2 100644 --- a/modules/gunzip/meta.yml +++ b/modules/gunzip/meta.yml @@ -8,6 +8,7 @@ tools: description: | gzip is a file format and a software application used for file compression and decompression. documentation: https://www.gnu.org/software/gzip/manual/gzip.html + licence: ['GPL-3.0-or-later'] input: - archive: type: file diff --git a/modules/hmmer/hmmalign/meta.yml b/modules/hmmer/hmmalign/meta.yml index c9a50bc2..58dc6b92 100644 --- a/modules/hmmer/hmmalign/meta.yml +++ b/modules/hmmer/hmmalign/meta.yml @@ -9,7 +9,7 @@ tools: documentation: http://hmmer.org/documentation.html tool_dev_url: None doi: "http://dx.doi.org/10.1371/journal.pcbi.1002195" - licence: ['BSD'] + licence: ['BSD-3-Clause'] input: - meta: diff --git a/modules/homer/annotatepeaks/meta.yml b/modules/homer/annotatepeaks/meta.yml index 39fe4197..c3ab9460 100644 --- a/modules/homer/annotatepeaks/meta.yml +++ b/modules/homer/annotatepeaks/meta.yml @@ -10,6 +10,7 @@ tools: HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. documentation: http://homer.ucsd.edu/homer/ doi: 10.1016/j.molcel.2010.05.004. + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/homer/findpeaks/meta.yml b/modules/homer/findpeaks/meta.yml index d1450f3c..2aa8db26 100644 --- a/modules/homer/findpeaks/meta.yml +++ b/modules/homer/findpeaks/meta.yml @@ -9,6 +9,7 @@ tools: HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. documentation: http://homer.ucsd.edu/homer/ doi: 10.1016/j.molcel.2010.05.004. + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/homer/maketagdirectory/meta.yml b/modules/homer/maketagdirectory/meta.yml index 9a88c2e1..802320f9 100644 --- a/modules/homer/maketagdirectory/meta.yml +++ b/modules/homer/maketagdirectory/meta.yml @@ -9,6 +9,7 @@ tools: HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. documentation: http://homer.ucsd.edu/homer/ doi: 10.1016/j.molcel.2010.05.004. + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/homer/makeucscfile/meta.yml b/modules/homer/makeucscfile/meta.yml index d9123c7e..68d5fcd4 100644 --- a/modules/homer/makeucscfile/meta.yml +++ b/modules/homer/makeucscfile/meta.yml @@ -10,6 +10,7 @@ tools: HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. documentation: http://homer.ucsd.edu/homer/ doi: 10.1016/j.molcel.2010.05.004. 
+ licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/ismapper/meta.yml b/modules/ismapper/meta.yml index 4ca2450a..810c1674 100644 --- a/modules/ismapper/meta.yml +++ b/modules/ismapper/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/jhawkey/IS_mapper tool_dev_url: https://github.com/jhawkey/IS_mapper doi: "https://doi.org/10.1186/s12864-015-1860-2" - licence: ['BSD'] + licence: ['BSD-3-Clause'] input: - meta: diff --git a/modules/isoseq3/cluster/meta.yml b/modules/isoseq3/cluster/meta.yml index 280e0150..4086ab05 100644 --- a/modules/isoseq3/cluster/meta.yml +++ b/modules/isoseq3/cluster/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md tool_dev_url: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md doi: "" - licence: ['BSD-3-clause-Clear'] + licence: ['BSD-3-Clause-Clear'] input: - meta: diff --git a/modules/isoseq3/refine/meta.yml b/modules/isoseq3/refine/meta.yml index 81b57c7c..eefd015b 100644 --- a/modules/isoseq3/refine/meta.yml +++ b/modules/isoseq3/refine/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md tool_dev_url: https://github.com/PacificBiosciences/IsoSeq/blob/master/isoseq-clustering.md doi: "" - licence: ['BSD-3-clause-Clear'] + licence: ['BSD-3-Clause-Clear'] input: - meta: diff --git a/modules/ivar/consensus/meta.yml b/modules/ivar/consensus/meta.yml index 389e5fe6..2ee5f2c6 100644 --- a/modules/ivar/consensus/meta.yml +++ b/modules/ivar/consensus/meta.yml @@ -10,6 +10,7 @@ tools: iVar - a computational package that contains functions broadly useful for viral amplicon-based sequencing. homepage: https://github.com/andersen-lab/ivar documentation: https://andersen-lab.github.io/ivar/html/manualpage.html + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/ivar/trim/meta.yml b/modules/ivar/trim/meta.yml index 4798c25f..44bc742e 100644 --- a/modules/ivar/trim/meta.yml +++ b/modules/ivar/trim/meta.yml @@ -10,6 +10,7 @@ tools: iVar - a computational package that contains functions broadly useful for viral amplicon-based sequencing. homepage: https://github.com/andersen-lab/ivar documentation: https://andersen-lab.github.io/ivar/html/manualpage.html + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/ivar/variants/meta.yml b/modules/ivar/variants/meta.yml index a689ffeb..fd3fce9e 100644 --- a/modules/ivar/variants/meta.yml +++ b/modules/ivar/variants/meta.yml @@ -10,6 +10,7 @@ tools: iVar - a computational package that contains functions broadly useful for viral amplicon-based sequencing. 
homepage: https://github.com/andersen-lab/ivar documentation: https://andersen-lab.github.io/ivar/html/manualpage.html + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/kallisto/index/meta.yml b/modules/kallisto/index/meta.yml index a4fb08c3..dd952e33 100644 --- a/modules/kallisto/index/meta.yml +++ b/modules/kallisto/index/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://pachterlab.github.io/kallisto/manual tool_dev_url: https://github.com/pachterlab/kallisto doi: "" - licence: ['BSD_2_clause'] + licence: ['BSD-2-Clause'] input: - fasta: diff --git a/modules/kraken2/kraken2/meta.yml b/modules/kraken2/kraken2/meta.yml index 5b849c3e..4b894705 100644 --- a/modules/kraken2/kraken2/meta.yml +++ b/modules/kraken2/kraken2/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://ccb.jhu.edu/software/kraken2/ documentation: https://github.com/DerrickWood/kraken2/wiki/Manual doi: 10.1186/s13059-019-1891-0 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/lima/meta.yml b/modules/lima/meta.yml index d77246c6..567632df 100644 --- a/modules/lima/meta.yml +++ b/modules/lima/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://lima.how/ tool_dev_url: https://github.com/pacificbiosciences/barcoding/ doi: "" - licence: ['BSD-3-clause-Clear'] + licence: ['BSD-3-Clause-Clear'] input: - meta: diff --git a/modules/methyldackel/extract/meta.yml b/modules/methyldackel/extract/meta.yml index 6c87f7c9..3c1dfb2a 100644 --- a/modules/methyldackel/extract/meta.yml +++ b/modules/methyldackel/extract/meta.yml @@ -17,6 +17,7 @@ tools: homepage: https://github.com/brentp/bwa-meth documentation: https://github.com/brentp/bwa-meth arxiv: arXiv:1401.1129 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/methyldackel/mbias/meta.yml b/modules/methyldackel/mbias/meta.yml index 4bc8f016..e66cde50 100644 --- a/modules/methyldackel/mbias/meta.yml +++ b/modules/methyldackel/mbias/meta.yml @@ -18,6 +18,7 @@ tools: homepage: https://github.com/brentp/bwa-meth documentation: https://github.com/brentp/bwa-meth arxiv: arXiv:1401.1129 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/minia/meta.yml b/modules/minia/meta.yml index 255bcc20..397a1d49 100644 --- a/modules/minia/meta.yml +++ b/modules/minia/meta.yml @@ -9,6 +9,7 @@ tools: a human genome on a desktop computer in a day. The output of Minia is a set of contigs. homepage: https://github.com/GATB/minia documentation: https://github.com/GATB/minia + licence: ['AGPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/minimap2/align/meta.yml b/modules/minimap2/align/meta.yml index 35ed411b..9994fb05 100644 --- a/modules/minimap2/align/meta.yml +++ b/modules/minimap2/align/meta.yml @@ -13,6 +13,7 @@ tools: A versatile pairwise aligner for genomic and spliced nucleotide sequences. homepage: https://github.com/lh3/minimap2 documentation: https://github.com/lh3/minimap2#uguide + licence: ['MIT'] input: - meta: type: map diff --git a/modules/minimap2/index/meta.yml b/modules/minimap2/index/meta.yml index e8450add..78a39bdd 100644 --- a/modules/minimap2/index/meta.yml +++ b/modules/minimap2/index/meta.yml @@ -10,6 +10,7 @@ tools: A versatile pairwise aligner for genomic and spliced nucleotide sequences. 
homepage: https://github.com/lh3/minimap2 documentation: https://github.com/lh3/minimap2#uguide + licence: ['MIT'] input: - fasta: type: file diff --git a/modules/mosdepth/meta.yml b/modules/mosdepth/meta.yml index 5627c268..be568aa6 100644 --- a/modules/mosdepth/meta.yml +++ b/modules/mosdepth/meta.yml @@ -11,6 +11,7 @@ tools: Fast BAM/CRAM depth calculation for WGS, exome, or targeted sequencing. documentation: https://github.com/brentp/mosdepth doi: 10.1093/bioinformatics/btx699 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/multiqc/meta.yml b/modules/multiqc/meta.yml index a54f95ac..63c75a45 100644 --- a/modules/multiqc/meta.yml +++ b/modules/multiqc/meta.yml @@ -11,6 +11,7 @@ tools: It's a general use tool, perfect for summarising the output from numerous bioinformatics tools. homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ + licence: ['GPL-3.0-or-later'] input: - multiqc_files: type: file diff --git a/modules/nanolyse/meta.yml b/modules/nanolyse/meta.yml index c59607fa..326fc221 100644 --- a/modules/nanolyse/meta.yml +++ b/modules/nanolyse/meta.yml @@ -8,6 +8,7 @@ tools: DNA contaminant removal using NanoLyse homepage: https://github.com/wdecoster/nanolyse documentation: https://github.com/wdecoster/nanolyse#nanolyse + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/nanoplot/meta.yml b/modules/nanoplot/meta.yml index 0527624f..52ebb622 100644 --- a/modules/nanoplot/meta.yml +++ b/modules/nanoplot/meta.yml @@ -13,6 +13,7 @@ tools: alignment. homepage: http://nanoplot.bioinf.be documentation: https://github.com/wdecoster/NanoPlot + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/optitype/meta.yml b/modules/optitype/meta.yml index 15912125..37654463 100644 --- a/modules/optitype/meta.yml +++ b/modules/optitype/meta.yml @@ -10,7 +10,7 @@ tools: homepage: https://github.com/FRED-2/OptiType documentation: https://github.com/FRED-2/OptiType doi: "10.1093/bioinformatics/btu548" - licence: ['BSD'] + licence: ['BSD-3-Clause'] input: - meta: diff --git a/modules/pangolin/meta.yml b/modules/pangolin/meta.yml index 29878ef0..a2c0979a 100644 --- a/modules/pangolin/meta.yml +++ b/modules/pangolin/meta.yml @@ -10,6 +10,7 @@ tools: Phylogenetic Assignment of Named Global Outbreak LINeages homepage: https://github.com/cov-lineages/pangolin#pangolearn-description manual: https://github.com/cov-lineages/pangolin#pangolearn-description + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/pbbam/pbmerge/meta.yml b/modules/pbbam/pbmerge/meta.yml index c483ca40..7042d86b 100644 --- a/modules/pbbam/pbmerge/meta.yml +++ b/modules/pbbam/pbmerge/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://pbbam.readthedocs.io/en/latest/tools/pbmerge.html tool_dev_url: https://github.com/pacificbiosciences/pbbam/ doi: "" - licence: ['BSD-3-clause-Clear'] + licence: ['BSD-3-Clause-Clear'] input: - meta: diff --git a/modules/pbccs/meta.yml b/modules/pbccs/meta.yml index b476c829..ef0899a1 100644 --- a/modules/pbccs/meta.yml +++ b/modules/pbccs/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://ccs.how/ tool_dev_url: https://github.com/PacificBiosciences/ccs doi: "" - licence: ['BSD-3-clause-Clear'] + licence: ['BSD-3-Clause-Clear'] input: - meta: diff --git a/modules/picard/collectmultiplemetrics/meta.yml b/modules/picard/collectmultiplemetrics/meta.yml index 587983a1..613afc62 100644 --- a/modules/picard/collectmultiplemetrics/meta.yml +++ b/modules/picard/collectmultiplemetrics/meta.yml @@ 
-14,6 +14,7 @@ tools: data and formats such as SAM/BAM/CRAM and VCF. homepage: https://broadinstitute.github.io/picard/ documentation: https://broadinstitute.github.io/picard/ + licence: ['MIT'] input: - meta: type: map diff --git a/modules/picard/collectwgsmetrics/meta.yml b/modules/picard/collectwgsmetrics/meta.yml index 7ae2d41d..5b4d8139 100644 --- a/modules/picard/collectwgsmetrics/meta.yml +++ b/modules/picard/collectwgsmetrics/meta.yml @@ -13,6 +13,7 @@ tools: data and formats such as SAM/BAM/CRAM and VCF. homepage: https://broadinstitute.github.io/picard/ documentation: https://broadinstitute.github.io/picard/ + licence: ['MIT'] input: - meta: type: map diff --git a/modules/picard/markduplicates/meta.yml b/modules/picard/markduplicates/meta.yml index 13f2d350..c9a08b36 100644 --- a/modules/picard/markduplicates/meta.yml +++ b/modules/picard/markduplicates/meta.yml @@ -14,6 +14,7 @@ tools: data and formats such as SAM/BAM/CRAM and VCF. homepage: https://broadinstitute.github.io/picard/ documentation: https://broadinstitute.github.io/picard/ + licence: ['MIT'] input: - meta: type: map diff --git a/modules/picard/mergesamfiles/meta.yml b/modules/picard/mergesamfiles/meta.yml index f732daf4..3d010c3c 100644 --- a/modules/picard/mergesamfiles/meta.yml +++ b/modules/picard/mergesamfiles/meta.yml @@ -12,6 +12,7 @@ tools: data and formats such as SAM/BAM/CRAM and VCF. homepage: https://broadinstitute.github.io/picard/ documentation: https://broadinstitute.github.io/picard/ + licence: ['MIT'] input: - meta: type: map diff --git a/modules/picard/sortsam/meta.yml b/modules/picard/sortsam/meta.yml index 3e0fb450..aa90e456 100644 --- a/modules/picard/sortsam/meta.yml +++ b/modules/picard/sortsam/meta.yml @@ -11,6 +11,7 @@ tools: data and formats such as SAM/BAM/CRAM and VCF. 
homepage: https://broadinstitute.github.io/picard/ documentation: https://broadinstitute.github.io/picard/ + licence: ['MIT'] input: - meta: diff --git a/modules/qcat/meta.yml b/modules/qcat/meta.yml index 938bc337..e0ab6a0f 100644 --- a/modules/qcat/meta.yml +++ b/modules/qcat/meta.yml @@ -9,6 +9,7 @@ tools: A demultiplexer for Nanopore samples homepage: https://github.com/nanoporetech/qcat documentation: https://github.com/nanoporetech/qcat#qcat + licence: ['MPL-2.0'] input: - meta: type: map diff --git a/modules/qualimap/bamqc/meta.yml b/modules/qualimap/bamqc/meta.yml index cc0471fc..6888d30e 100644 --- a/modules/qualimap/bamqc/meta.yml +++ b/modules/qualimap/bamqc/meta.yml @@ -14,6 +14,7 @@ tools: homepage: http://qualimap.bioinfo.cipf.es/ documentation: http://qualimap.conesalab.org/doc_html/index.html doi: 10.1093/bioinformatics/bts503 + licence: ['GPL-2.0-only'] input: - meta: type: map diff --git a/modules/quast/meta.yml b/modules/quast/meta.yml index 8b692e9e..05faa8b8 100644 --- a/modules/quast/meta.yml +++ b/modules/quast/meta.yml @@ -9,7 +9,8 @@ tools: description: | QUAST calculates quality metrics for genome assemblies homepage: http://bioinf.spbau.ru/quast - doi: + doi: https://doi.org/10.1093/bioinformatics/btt086 + licence: ['GPL-2.0-only'] input: - consensus: type: file diff --git a/modules/rsem/calculateexpression/meta.yml b/modules/rsem/calculateexpression/meta.yml index e2fb8f6d..fdfaa0c4 100644 --- a/modules/rsem/calculateexpression/meta.yml +++ b/modules/rsem/calculateexpression/meta.yml @@ -11,6 +11,7 @@ tools: homepage: https://github.com/deweylab/RSEM documentation: https://github.com/deweylab/RSEM doi: https://doi.org/10.1186/1471-2105-12-323 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/rsem/preparereference/meta.yml b/modules/rsem/preparereference/meta.yml index 94f7cc05..062f0256 100644 --- a/modules/rsem/preparereference/meta.yml +++ b/modules/rsem/preparereference/meta.yml @@ -10,6 +10,7 @@ tools: homepage: https://github.com/deweylab/RSEM documentation: https://github.com/deweylab/RSEM doi: https://doi.org/10.1186/1471-2105-12-323 + licence: ['GPL-3.0-or-later'] input: - fasta: type: file diff --git a/modules/rseqc/bamstat/meta.yml b/modules/rseqc/bamstat/meta.yml index 64a0b9e4..561ba195 100644 --- a/modules/rseqc/bamstat/meta.yml +++ b/modules/rseqc/bamstat/meta.yml @@ -12,6 +12,7 @@ tools: homepage: http://rseqc.sourceforge.net/ documentation: http://rseqc.sourceforge.net/ doi: 10.1093/bioinformatics/bts356 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/rseqc/inferexperiment/meta.yml b/modules/rseqc/inferexperiment/meta.yml index 63710d7b..88eabc8a 100644 --- a/modules/rseqc/inferexperiment/meta.yml +++ b/modules/rseqc/inferexperiment/meta.yml @@ -11,6 +11,7 @@ tools: homepage: http://rseqc.sourceforge.net/ documentation: http://rseqc.sourceforge.net/ doi: 10.1093/bioinformatics/bts356 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/rseqc/innerdistance/meta.yml b/modules/rseqc/innerdistance/meta.yml index 7eea1350..27bcf242 100644 --- a/modules/rseqc/innerdistance/meta.yml +++ b/modules/rseqc/innerdistance/meta.yml @@ -11,6 +11,7 @@ tools: homepage: http://rseqc.sourceforge.net/ documentation: http://rseqc.sourceforge.net/ doi: 10.1093/bioinformatics/bts356 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/rseqc/junctionannotation/meta.yml b/modules/rseqc/junctionannotation/meta.yml index 5562b0b7..56364232 100644 --- 
a/modules/rseqc/junctionannotation/meta.yml +++ b/modules/rseqc/junctionannotation/meta.yml @@ -12,6 +12,7 @@ tools: homepage: http://rseqc.sourceforge.net/ documentation: http://rseqc.sourceforge.net/ doi: 10.1093/bioinformatics/bts356 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/rseqc/junctionsaturation/meta.yml b/modules/rseqc/junctionsaturation/meta.yml index ffa359ab..05d814ad 100644 --- a/modules/rseqc/junctionsaturation/meta.yml +++ b/modules/rseqc/junctionsaturation/meta.yml @@ -12,6 +12,7 @@ tools: homepage: http://rseqc.sourceforge.net/ documentation: http://rseqc.sourceforge.net/ doi: 10.1093/bioinformatics/bts356 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/rseqc/readdistribution/meta.yml b/modules/rseqc/readdistribution/meta.yml index d12ad600..4c736878 100644 --- a/modules/rseqc/readdistribution/meta.yml +++ b/modules/rseqc/readdistribution/meta.yml @@ -12,6 +12,7 @@ tools: homepage: http://rseqc.sourceforge.net/ documentation: http://rseqc.sourceforge.net/ doi: 10.1093/bioinformatics/bts356 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/rseqc/readduplication/meta.yml b/modules/rseqc/readduplication/meta.yml index 98d25ea4..3623de80 100644 --- a/modules/rseqc/readduplication/meta.yml +++ b/modules/rseqc/readduplication/meta.yml @@ -11,6 +11,7 @@ tools: homepage: http://rseqc.sourceforge.net/ documentation: http://rseqc.sourceforge.net/ doi: 10.1093/bioinformatics/bts356 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/salmon/index/meta.yml b/modules/salmon/index/meta.yml index c956f15c..3b0cd853 100644 --- a/modules/salmon/index/meta.yml +++ b/modules/salmon/index/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://salmon.readthedocs.io/en/latest/salmon.html manual: https://salmon.readthedocs.io/en/latest/salmon.html doi: 10.1038/nmeth.4197 + licence: ['GPL-3.0-or-later'] input: - genome_fasta: type: file diff --git a/modules/salmon/quant/meta.yml b/modules/salmon/quant/meta.yml index 47e81229..223ca82b 100644 --- a/modules/salmon/quant/meta.yml +++ b/modules/salmon/quant/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://salmon.readthedocs.io/en/latest/salmon.html manual: https://salmon.readthedocs.io/en/latest/salmon.html doi: 10.1038/nmeth.4197 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/samtools/ampliconclip/meta.yml b/modules/samtools/ampliconclip/meta.yml index 7aa8c6bd..8959b98d 100644 --- a/modules/samtools/ampliconclip/meta.yml +++ b/modules/samtools/ampliconclip/meta.yml @@ -15,6 +15,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: diff --git a/modules/samtools/faidx/meta.yml b/modules/samtools/faidx/meta.yml index 6e63b671..16c0b334 100644 --- a/modules/samtools/faidx/meta.yml +++ b/modules/samtools/faidx/meta.yml @@ -12,6 +12,7 @@ tools: homepage: http://www.htslib.org/ documentation: http://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - fasta: type: file diff --git a/modules/samtools/fastq/meta.yml b/modules/samtools/fastq/meta.yml index 9a45886b..91fd476d 100644 --- a/modules/samtools/fastq/meta.yml +++ b/modules/samtools/fastq/meta.yml @@ -14,6 +14,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git 
a/modules/samtools/flagstat/meta.yml b/modules/samtools/flagstat/meta.yml index d408cb76..9bd9ff89 100644 --- a/modules/samtools/flagstat/meta.yml +++ b/modules/samtools/flagstat/meta.yml @@ -16,6 +16,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/samtools/idxstats/meta.yml b/modules/samtools/idxstats/meta.yml index f4cb613f..ec542f34 100644 --- a/modules/samtools/idxstats/meta.yml +++ b/modules/samtools/idxstats/meta.yml @@ -17,6 +17,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/samtools/index/meta.yml b/modules/samtools/index/meta.yml index 5f4dd3fb..988e8f53 100644 --- a/modules/samtools/index/meta.yml +++ b/modules/samtools/index/meta.yml @@ -14,6 +14,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/samtools/merge/meta.yml b/modules/samtools/merge/meta.yml index 1903cdaa..78b75b36 100644 --- a/modules/samtools/merge/meta.yml +++ b/modules/samtools/merge/meta.yml @@ -14,6 +14,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/samtools/mpileup/meta.yml b/modules/samtools/mpileup/meta.yml index ce55643a..fac7a5bc 100644 --- a/modules/samtools/mpileup/meta.yml +++ b/modules/samtools/mpileup/meta.yml @@ -14,6 +14,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/samtools/sort/meta.yml b/modules/samtools/sort/meta.yml index cd47c86d..3402a068 100644 --- a/modules/samtools/sort/meta.yml +++ b/modules/samtools/sort/meta.yml @@ -14,6 +14,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/samtools/stats/meta.yml b/modules/samtools/stats/meta.yml index d75d73e2..ae41498a 100644 --- a/modules/samtools/stats/meta.yml +++ b/modules/samtools/stats/meta.yml @@ -15,6 +15,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/samtools/view/meta.yml b/modules/samtools/view/meta.yml index 2e66e7cd..29d1ecc1 100644 --- a/modules/samtools/view/meta.yml +++ b/modules/samtools/view/meta.yml @@ -14,6 +14,7 @@ tools: homepage: http://www.htslib.org/ documentation: hhttp://www.htslib.org/doc/samtools.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/seacr/callpeak/meta.yml b/modules/seacr/callpeak/meta.yml index 22db567d..53b3415f 100644 --- a/modules/seacr/callpeak/meta.yml +++ b/modules/seacr/callpeak/meta.yml @@ -17,6 +17,7 @@ tools: (i.e. regions with no read coverage). 
homepage: https://github.com/FredHutch/SEACR documentation: https://github.com/FredHutch/SEACR + licence: ['GPL-2.0-only'] input: - meta: type: map diff --git a/modules/seqkit/split2/meta.yml b/modules/seqkit/split2/meta.yml index beb676ea..90eec7f9 100644 --- a/modules/seqkit/split2/meta.yml +++ b/modules/seqkit/split2/meta.yml @@ -10,6 +10,7 @@ tools: homepage: https://github.com/shenwei356/seqkit documentation: https://bioinf.shenwei.me/seqkit/ doi: 10.1371/journal.pone.0163962 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/sequenzautils/bam2seqz/meta.yml b/modules/sequenzautils/bam2seqz/meta.yml index 278f9750..e05d2fa9 100755 --- a/modules/sequenzautils/bam2seqz/meta.yml +++ b/modules/sequenzautils/bam2seqz/meta.yml @@ -8,7 +8,7 @@ tools: homepage: https://sequenza-utils.readthedocs.io/en/latest/index.html documentation: https://sequenza-utils.readthedocs.io/en/latest/index.html doi: 10.1093/annonc/mdu479 - + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/sequenzautils/gcwiggle/meta.yml b/modules/sequenzautils/gcwiggle/meta.yml index 4ecba04a..616e073b 100644 --- a/modules/sequenzautils/gcwiggle/meta.yml +++ b/modules/sequenzautils/gcwiggle/meta.yml @@ -8,6 +8,7 @@ tools: homepage: https://sequenza-utils.readthedocs.io/en/latest/index.html documentation: https://sequenza-utils.readthedocs.io/en/latest/index.html doi: 10.1093/annonc/mdu479 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/seqwish/induce/meta.yml b/modules/seqwish/induce/meta.yml index c5f9d4c7..281e3994 100644 --- a/modules/seqwish/induce/meta.yml +++ b/modules/seqwish/induce/meta.yml @@ -13,6 +13,7 @@ tools: sequences to a variation graph encoding the sequences and their alignments. homepage: https://github.com/ekg/seqwish documentation: https://github.com/ekg/seqwish + licence: ['MIT'] input: - meta: type: map diff --git a/modules/snpeff/meta.yml b/modules/snpeff/meta.yml index ba049c0e..8ba18683 100644 --- a/modules/snpeff/meta.yml +++ b/modules/snpeff/meta.yml @@ -9,6 +9,7 @@ tools: It annotates and predicts the effects of genetic variants on genes and proteins (such as amino acid changes). homepage: https://pcingola.github.io/SnpEff/ documentation: https://pcingola.github.io/SnpEff/se_introduction/ + licence: ['MIT'] params: - use_cache: type: boolean diff --git a/modules/snpsites/meta.yml b/modules/snpsites/meta.yml index 5361aa3e..381d25cc 100644 --- a/modules/snpsites/meta.yml +++ b/modules/snpsites/meta.yml @@ -9,6 +9,7 @@ tools: description: Rapidly extracts SNPs from a multi-FASTA alignment. 
homepage: https://www.sanger.ac.uk/tool/snp-sites/ documentation: https://github.com/sanger-pathogens/snp-sites + licence: ['GPL-3.0-or-later'] input: - alignment: type: file diff --git a/modules/sratools/fasterqdump/meta.yml b/modules/sratools/fasterqdump/meta.yml index ac61e71f..1478bed8 100644 --- a/modules/sratools/fasterqdump/meta.yml +++ b/modules/sratools/fasterqdump/meta.yml @@ -10,7 +10,7 @@ tools: homepage: https://github.com/ncbi/sra-tools documentation: https://github.com/ncbi/sra-tools/wiki tool_dev_url: https://github.com/ncbi/sra-tools - licence: ['Public Domain'] + licence: ['US-Government-Work'] input: - meta: diff --git a/modules/sratools/prefetch/meta.yml b/modules/sratools/prefetch/meta.yml index ab0a5ce5..22213b29 100644 --- a/modules/sratools/prefetch/meta.yml +++ b/modules/sratools/prefetch/meta.yml @@ -10,7 +10,7 @@ tools: homepage: https://github.com/ncbi/sra-tools documentation: https://github.com/ncbi/sra-tools/wiki tool_dev_url: https://github.com/ncbi/sra-tools - licence: ['Public Domain'] + licence: ['US-Government-Work'] input: - meta: diff --git a/modules/star/align/meta.yml b/modules/star/align/meta.yml index 00f955dd..2d78b81a 100644 --- a/modules/star/align/meta.yml +++ b/modules/star/align/meta.yml @@ -13,6 +13,7 @@ tools: homepage: https://github.com/alexdobin/STAR manual: https://github.com/alexdobin/STAR/blob/master/doc/STARmanual.pdf doi: 10.1093/bioinformatics/bts635 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/star/genomegenerate/meta.yml b/modules/star/genomegenerate/meta.yml index 09728b58..04ade195 100644 --- a/modules/star/genomegenerate/meta.yml +++ b/modules/star/genomegenerate/meta.yml @@ -13,6 +13,7 @@ tools: homepage: https://github.com/alexdobin/STAR manual: https://github.com/alexdobin/STAR/blob/master/doc/STARmanual.pdf doi: 10.1093/bioinformatics/bts635 + licence: ['MIT'] input: - fasta: type: file diff --git a/modules/stringtie/merge/meta.yml b/modules/stringtie/merge/meta.yml index 81eca6dc..02899766 100644 --- a/modules/stringtie/merge/meta.yml +++ b/modules/stringtie/merge/meta.yml @@ -10,6 +10,7 @@ tools: Transcript assembly and quantification for RNA-Seq homepage: https://ccb.jhu.edu/software/stringtie/index.shtml documentation: https://ccb.jhu.edu/software/stringtie/index.shtml?t=manual + licence: ['MIT'] input: - stringtie_gtf: type: file diff --git a/modules/stringtie/stringtie/meta.yml b/modules/stringtie/stringtie/meta.yml index 0074b90f..7e854caa 100644 --- a/modules/stringtie/stringtie/meta.yml +++ b/modules/stringtie/stringtie/meta.yml @@ -12,6 +12,7 @@ tools: Transcript assembly and quantification for RNA-Seq homepage: https://ccb.jhu.edu/software/stringtie/index.shtml documentation: https://ccb.jhu.edu/software/stringtie/index.shtml?t=manual + licence: ['MIT'] input: - meta: type: map diff --git a/modules/tabix/bgzip/meta.yml b/modules/tabix/bgzip/meta.yml index 0b0787bf..f8318c7c 100644 --- a/modules/tabix/bgzip/meta.yml +++ b/modules/tabix/bgzip/meta.yml @@ -11,6 +11,7 @@ tools: homepage: https://www.htslib.org/doc/tabix.html documentation: http://www.htslib.org/doc/bgzip.html doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/tabix/bgziptabix/meta.yml b/modules/tabix/bgziptabix/meta.yml index 5b4cc4e8..f2aed84d 100644 --- a/modules/tabix/bgziptabix/meta.yml +++ b/modules/tabix/bgziptabix/meta.yml @@ -12,6 +12,7 @@ tools: homepage: https://www.htslib.org/doc/tabix.html documentation: https://www.htslib.org/doc/tabix.1.html doi: 
10.1093/bioinformatics/btq671 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/tabix/tabix/meta.yml b/modules/tabix/tabix/meta.yml index 15edf8c3..2e37c4ff 100644 --- a/modules/tabix/tabix/meta.yml +++ b/modules/tabix/tabix/meta.yml @@ -10,6 +10,7 @@ tools: homepage: https://www.htslib.org/doc/tabix.html documentation: https://www.htslib.org/doc/tabix.1.html doi: 10.1093/bioinformatics/btq671 + licence: ['MIT'] input: - meta: type: map diff --git a/modules/tiddit/sv/meta.yml b/modules/tiddit/sv/meta.yml index 4060a450..f788ffa6 100644 --- a/modules/tiddit/sv/meta.yml +++ b/modules/tiddit/sv/meta.yml @@ -10,6 +10,7 @@ tools: homepage: https://github.com/SciLifeLab/TIDDIT documentation: https://github.com/SciLifeLab/TIDDIT/blob/master/README.md doi: 10.12688/f1000research.11168.1 + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/trimgalore/meta.yml b/modules/trimgalore/meta.yml index 7c46bea9..c7e1df1d 100644 --- a/modules/trimgalore/meta.yml +++ b/modules/trimgalore/meta.yml @@ -13,6 +13,7 @@ tools: MspI-digested RRBS-type (Reduced Representation Bisufite-Seq) libraries. homepage: https://www.bioinformatics.babraham.ac.uk/projects/trim_galore/ documentation: https://github.com/FelixKrueger/TrimGalore/blob/master/Docs/Trim_Galore_User_Guide.md + licence: ['GPL-3.0-or-later'] input: - meta: type: map diff --git a/modules/untar/meta.yml b/modules/untar/meta.yml index 2b586c92..51f94995 100644 --- a/modules/untar/meta.yml +++ b/modules/untar/meta.yml @@ -8,6 +8,7 @@ tools: description: | Extract tar.gz files. documentation: https://www.gnu.org/software/tar/manual/ + licence: ['GPL-3.0-or-later'] input: - archive: type: file diff --git a/modules/unzip/meta.yml b/modules/unzip/meta.yml index 57c07f00..7bca1ec2 100644 --- a/modules/unzip/meta.yml +++ b/modules/unzip/meta.yml @@ -9,7 +9,7 @@ tools: homepage: https://sourceforge.net/projects/p7zip/ documentation: https://sourceforge.net/projects/p7zip/ tool_dev_url: https://sourceforge.net/projects/p7zip" - licence: "GNU LPGL" + licence: ['LGPL-2.1-or-later'] input: - archive: diff --git a/modules/variantbam/meta.yml b/modules/variantbam/meta.yml index 9394e418..ddcd0656 100644 --- a/modules/variantbam/meta.yml +++ b/modules/variantbam/meta.yml @@ -14,7 +14,7 @@ tools: documentation: https://github.com/walaj/VariantBam#table-of-contents tool_dev_url: https://github.com/walaj/VariantBam doi: 10.1093/bioinformatics/btw111 - licence: ['Apache2'] + licence: ['Apache-2.0'] input: - meta: From 81ed0e0ff2a138cd65146d167f614fa4f5871c8b Mon Sep 17 00:00:00 2001 From: Kevin Date: Sat, 23 Oct 2021 09:30:10 -0700 Subject: [PATCH 156/314] added meta.yml for umitools (#860) * added meta.yml for umitools * Update modules/umitools/dedup/meta.yml type: list --> type: file Co-authored-by: Harshil Patel * Update modules/umitools/dedup/meta.yml aww thanks @drpateh :D Co-authored-by: Harshil Patel * Update modules/umitools/dedup/meta.yml module can only handle one BAM at a time, ergo BAM files --> BAM file Co-authored-by: Harshil Patel * Update meta.yml added `pattern` for input `bam` and `bai` * removed trailing whitespace to appease linter * added license to new meta.yml files * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/umitools/dedup/meta.yml | 47 +++++++++++++++++++++++++++++++ modules/umitools/extract/meta.yml | 46 ++++++++++++++++++++++++++++++ 2 files changed, 93 insertions(+) create mode 100644 modules/umitools/dedup/meta.yml create mode 100644 modules/umitools/extract/meta.yml diff 
--git a/modules/umitools/dedup/meta.yml b/modules/umitools/dedup/meta.yml new file mode 100644 index 00000000..f89cc1ea --- /dev/null +++ b/modules/umitools/dedup/meta.yml @@ -0,0 +1,47 @@ +name: umitools_dedup +description: Deduplicate reads based on the mapping co-ordinate and the UMI attached to the read. +keywords: + - umitools + - deduplication +tools: + - umi_tools: + description: > + UMI-tools contains tools for dealing with Unique Molecular Identifiers (UMIs)/Random Molecular Tags (RMTs) + and single cell RNA-Seq cell barcodes + documentation: https://umi-tools.readthedocs.io/en/latest/ + license: ['MIT'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: | + BAM file containing reads to be deduplicated via UMIs. + pattern: "*.{bam}" + - bai: + type: file + description: | + BAM index files corresponding to the input BAM file. + pattern: "*.{bai}" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file with deduplicated UMIs. + pattern: "*.{bam}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@drpatelh" + - "@grst" + - "@klkeys" diff --git a/modules/umitools/extract/meta.yml b/modules/umitools/extract/meta.yml new file mode 100644 index 00000000..ae6f9fee --- /dev/null +++ b/modules/umitools/extract/meta.yml @@ -0,0 +1,46 @@ +name: umitools_extract +description: Extracts UMI barcode from a read and add it to the read name, leaving any sample barcode in place +keywords: + - umitools + - extract +tools: + - umi_tools: + description: > + UMI-tools contains tools for dealing with Unique Molecular Identifiers (UMIs)/Random Molecular Tags (RMTs) + and single cell RNA-Seq cell barcodes + documentation: https://umi-tools.readthedocs.io/en/latest/ + license: ['MIT'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: list + description: | + List of input FASTQ files whose UMIs will be extracted. +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: Extracted FASTQ files. | + For single-end reads, pattern is \${prefix}.umi_extract.fastq.gz. | + For paired-end reads, pattern is \${prefix}.umi_extract_{1,2}.fastq.gz. 
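(Aside: taken together, the dedup meta.yml above and this extract meta.yml describe the usual UMI pattern, extract barcodes from the FASTQ reads, align and sort/index them elsewhere, then deduplicate the resulting BAM. A rough sketch of that wiring in the include/addParams style used by the test workflows in this repository; include paths, channel contents and file names are assumptions for illustration only:)

include { UMITOOLS_EXTRACT } from './modules/umitools/extract/main' addParams( options: [:] )
include { UMITOOLS_DEDUP   } from './modules/umitools/dedup/main'   addParams( options: [:] )

workflow UMI_SKETCH {
    // [ meta, [ fastq files ] ] as documented for the extract module
    input = [
        [ id:'test', single_end:false ],                       // meta map
        [ file('test_1.fastq.gz'), file('test_2.fastq.gz') ]   // hypothetical FASTQ files
    ]

    UMITOOLS_EXTRACT ( input )

    // an aligner plus a samtools sort/index step is assumed here, turning the
    // extracted reads into a [ meta, bam, bai ] channel for the dedup module
    // UMITOOLS_DEDUP ( ch_sorted_bam_bai )
}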
+ pattern: "*.{fastq.gz}" + - log: + type: file + description: Logfile for umi_tools + pattern: "*.{log}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@drpatelh" + - "@grst" From 7676d9d7282c74a429ed08962db5556295f17108 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Sat, 23 Oct 2021 18:45:19 +0100 Subject: [PATCH 157/314] Filtermutectcalls (#796) * first commit with files for filtermutectcalls initialised * found missing test file that needs to be resolved * saving config changes * fixing pytest_module conflict * finished module, just needs repository side tests added * test data added, versions file updated * modified to emit correct versions file * Update main.nf * Update test_data.config * updated test script * fixed main.nf * Update main.nf * Update main.nf * removed whitespace from test script * Update test_data.config * Update .gitignore * Update test_data.config * tests changed to new names, main script edited to match comments on learnreads pr * Update meta.yml * Apply suggestions from code review * Update main.nf * Apply suggestions from code review * Apply suggestions from code review * Update main.nf * Update main.nf * Update main.nf * Update main.nf Co-authored-by: GCJMackenzie Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel --- modules/gatk4/filtermutectcalls/functions.nf | 78 +++++++++++++++++ modules/gatk4/filtermutectcalls/main.nf | 65 ++++++++++++++ modules/gatk4/filtermutectcalls/meta.yml | 84 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/modules/gatk4/filtermutectcalls/main.nf | 65 ++++++++++++++ .../modules/gatk4/filtermutectcalls/test.yml | 35 ++++++++ 6 files changed, 331 insertions(+) create mode 100644 modules/gatk4/filtermutectcalls/functions.nf create mode 100644 modules/gatk4/filtermutectcalls/main.nf create mode 100644 modules/gatk4/filtermutectcalls/meta.yml create mode 100644 tests/modules/gatk4/filtermutectcalls/main.nf create mode 100644 tests/modules/gatk4/filtermutectcalls/test.yml diff --git a/modules/gatk4/filtermutectcalls/functions.nf b/modules/gatk4/filtermutectcalls/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/filtermutectcalls/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + 
return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf new file mode 100644 index 00000000..5a784677 --- /dev/null +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -0,0 +1,65 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_FILTERMUTECTCALLS { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(vcf), path(tbi), path(stats), path(orientationbias), path(segmentation), path(contaminationfile), val(contaminationest) + path fasta + path fastaidx + path dict + + output: + tuple val(meta), path("*.vcf.gz") , emit: vcf + tuple val(meta), path("*.vcf.gz.tbi") , emit: tbi + tuple val(meta), path("*.filteringStats.tsv"), emit: stats + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + def orientationbias_options = '' + if (orientationbias) { + orientationbias_options = '--orientation-bias-artifact-priors ' + orientationbias.join(' --orientation-bias-artifact-priors ') + } + + def segmentation_options = '' + if (segmentation) { + segmentation_options = '--tumor-segmentation ' + segmentation.join(' --tumor-segmentation ') + } + + def contamination_options = contaminationest ? 
" --contamination-estimate ${contaminationest} " : '' + if (contaminationfile) { + contamination_options = '--contamination-table ' + contaminationfile.join(' --contamination-table ') + } + """ + gatk FilterMutectCalls \\ + -R $fasta \\ + -V $vcf \\ + $orientationbias_options \\ + $segmentation_options \\ + $contamination_options \\ + -O ${prefix}.vcf.gz \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/filtermutectcalls/meta.yml b/modules/gatk4/filtermutectcalls/meta.yml new file mode 100644 index 00000000..f14f9404 --- /dev/null +++ b/modules/gatk4/filtermutectcalls/meta.yml @@ -0,0 +1,84 @@ +name: gatk4_filtermutectcalls +description: | + Filters the raw output of mutect2, can optionally use outputs of calculatecontamination and learnreadorientationmodel to improve filtering. +keywords: + - filtermutectcalls + - mutect2 + - gatk4 + - filtervcf +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - vcf: + type: file + description: compressed vcf file of mutect2calls + pattern: "*.vcf.gz" + - tbi: + type: file + description: Index of vcf file + pattern: "*vcf.gz.tbi" + - stats: + type: file + description: Stats file that pairs with output vcf file + pattern: "*vcf.gz.stats" + - orientationbias: + type: list + description: files containing artifact priors for input vcf. Optional input. + pattern: "*.artifact-prior.tar.gz" + - segmentation: + type: list + description: tables containing segmentation information for input vcf. Optional input. + pattern: "*.segmentation.table" + - contaminationfile: + type: list + description: table(s) containing contamination contamination data for input vcf. Optional input, takes priority over contaminationest. + pattern: "*.contamination.table" + - contaminationest: + type: val + description: estimation of contamination value as a double. Optional input, will only be used if contaminationfile is not specified. + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fastaidx: + type: file + description: Index of reference fasta file + pattern: "fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + +output: + - vcf: + type: file + description: file containing filtered mutect2 calls. + pattern: "*.vcf.gz" + - tbi: + type: file + description: tbi file that pairs with vcf. + pattern: "*.vcf.gz.tbi" + - stats: + type: file + description: file containing statistics of the filtermutectcalls run. 
+ pattern: "*.filteringStats.tsv" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7093790b..dfa00bd0 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -410,6 +410,10 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** +gatk4/filtermutectcalls: + - modules/gatk4/filtermutectcalls/** + - tests/modules/gatk4/filtermutectcalls/** + gatk4/getpileupsummaries: - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** diff --git a/tests/modules/gatk4/filtermutectcalls/main.nf b/tests/modules/gatk4/filtermutectcalls/main.nf new file mode 100644 index 00000000..a425238b --- /dev/null +++ b/tests/modules/gatk4/filtermutectcalls/main.nf @@ -0,0 +1,65 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutectcalls/main.nf' addParams( options: [suffix:'.filtered'] ) + +workflow test_gatk4_filtermutectcalls_base { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_stats'], checkIfExists: true), + [], + [], + [], + [] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) +} + +workflow test_gatk4_filtermutectcalls_with_files { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_stats'], checkIfExists: true), + [ file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_artifact_prior_tar_gz'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_segmentation_table'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_contamination_table'], checkIfExists: true) ], + [] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) +} + +workflow test_gatk4_filtermutectcalls_use_val { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), + 
file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_stats'], checkIfExists: true), + [ file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_artifact_prior_tar_gz'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_segmentation_table'], checkIfExists: true) ], + [], + '20.0' + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) +} diff --git a/tests/modules/gatk4/filtermutectcalls/test.yml b/tests/modules/gatk4/filtermutectcalls/test.yml new file mode 100644 index 00000000..b17a306c --- /dev/null +++ b/tests/modules/gatk4/filtermutectcalls/test.yml @@ -0,0 +1,35 @@ +- name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_base + command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_base -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/filtermutectcalls + files: + - path: output/gatk4/test.filtered.vcf.gz + - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv + md5sum: 98e1b87a52999eb8f429ef4a7877eb3f + - path: output/gatk4/test.filtered.vcf.gz.tbi + md5sum: d88d2b745c9226ddf284e3494db8b9d2 + +- name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_with_files + command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/filtermutectcalls + files: + - path: output/gatk4/test.filtered.vcf.gz + - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv + md5sum: 98e1b87a52999eb8f429ef4a7877eb3f + - path: output/gatk4/test.filtered.vcf.gz.tbi + md5sum: d88d2b745c9226ddf284e3494db8b9d2 + +- name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_use_val + command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/filtermutectcalls + files: + - path: output/gatk4/test.filtered.vcf.gz + - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv + md5sum: 98e1b87a52999eb8f429ef4a7877eb3f + - path: output/gatk4/test.filtered.vcf.gz.tbi + md5sum: d88d2b745c9226ddf284e3494db8b9d2 From 481d3c811d07c35ddf7dbf2ee528575bbfb8254c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Sat, 23 Oct 2021 18:55:28 +0100 Subject: [PATCH 158/314] New module: `gstama/collapse` (#809) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add gd-tama module * 🐛 FIX (TEMP): Update singularity container address * 📦 NEW: Add bamtools module * 📦 NEW: Rewrite and rename module (gstama => gstama/collapse) * 👌 IMPROVE: ignore test data * 👌 IMPROVE: Remove junk files * 👌 IMPROVE: Update output * 👌 IMPROVE: Add channel for publishing tama's metadata outputs * 👌 IMPROVE: Update process label * 🐛 FIX: Use depot.galxyproject.org url for singularity * 👌 IMPROVE: autoselect running mode * 🐛 FIX: correct gstama collapse bash test * 👌 IMPROVE: Update 
to last templates version * 👌 IMPROVE: Update tama package and label * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update test * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add gd-tama module * 🐛 FIX (TEMP): Update singularity container address * 📦 NEW: Add bamtools module * 📦 NEW: Rewrite and rename module (gstama => gstama/collapse) * 👌 IMPROVE: ignore test data * 👌 IMPROVE: Update output * 👌 IMPROVE: Add channel for publishing tama's metadata outputs * 👌 IMPROVE: Update process label * 🐛 FIX: Use depot.galxyproject.org url for singularity * 👌 IMPROVE: autoselect running mode * 🐛 FIX: correct gstama collapse bash test * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update tama package and label * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update test * 👌 IMPROVE: delete unnecessary files * 👌 IMPROVE: Update + clean - Remove unnecessary files - Update to new versions.yml file - Better output channels * 👌 IMPROVE: Update meta.yml and output channels * 👌 IMPROVE: Remove useless files * 👌 IMPROVE: Remove automatic MODE setup * 👌 IMPROVE: Applied @jfy133 code modification suggestions * Update modules/gstama/collapse/meta.yml Co-authored-by: James A. Fellows Yates * 🐛 FIX: Add missing fasta option in meta.yml * 🐛 FIX: Fix typo * 🐛 FIX: Update package version * Update main.nf * Update meta.yml * Update modules/gstama/collapse/meta.yml * Apply suggestions from code review * Update tests/modules/gstama/collapse/main.nf * Update main.nf Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/gstama/collapse/functions.nf | 78 ++++++++++++++++++++++++ modules/gstama/collapse/main.nf | 52 ++++++++++++++++ modules/gstama/collapse/meta.yml | 83 ++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/gstama/collapse/main.nf | 16 +++++ tests/modules/gstama/collapse/test.yml | 22 +++++++ 6 files changed, 255 insertions(+) create mode 100644 modules/gstama/collapse/functions.nf create mode 100644 modules/gstama/collapse/main.nf create mode 100644 modules/gstama/collapse/meta.yml create mode 100644 tests/modules/gstama/collapse/main.nf create mode 100644 tests/modules/gstama/collapse/test.yml diff --git a/modules/gstama/collapse/functions.nf b/modules/gstama/collapse/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gstama/collapse/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gstama/collapse/main.nf b/modules/gstama/collapse/main.nf new file mode 100644 index 00000000..d4167b5e --- /dev/null +++ b/modules/gstama/collapse/main.nf @@ -0,0 +1,52 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GSTAMA_COLLAPSE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gs-tama=1.0.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0" + } else { + container "quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0" + } + + input: + tuple val(meta), path(bam) + path fasta + + output: + tuple val(meta), path("*.bed") , emit: bed + tuple val(meta), path("*_trans_read.bed") , emit: bed_trans_reads + tuple val(meta), path("*_local_density_error.txt"), emit: local_density_error + tuple val(meta), path("*_polya.txt") , emit: polya + tuple val(meta), path("*_read.txt") , emit: read + tuple val(meta), path("*_strand_check.txt") , emit: strand_check + tuple val(meta), path("*_trans_report.txt") , emit: trans_report + path "versions.yml" , emit: versions + + tuple val(meta), path("*_varcov.txt") , emit: varcov , optional: true + tuple val(meta), path("*_variants.txt") , emit: variants, optional: true + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + tama_collapse.py \\ + -s $bam \\ + -f $fasta \\ + -p ${prefix} \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' ) + END_VERSIONS + """ +} diff --git a/modules/gstama/collapse/meta.yml b/modules/gstama/collapse/meta.yml new file mode 100644 index 00000000..0b26191f --- /dev/null +++ b/modules/gstama/collapse/meta.yml @@ -0,0 +1,83 @@ +name: GSTAMA_COLLAPSE +description: Collapse redundant transcript models in Iso-Seq data. +keywords: + - tama_collapse.py + - isoseq + - nanopore + - long-read + - transcriptome + - gene model + - TAMA +tools: + - tama_collapse.py: + description: Collapse similar gene model + homepage: https://github.com/sguizard/gs-tama + documentation: https://github.com/GenomeRIK/tama/wiki + tool_dev_url: https://github.com/sguizard/gs-tama + doi: 10.1186/s12864-020-07123-7 + licence: GNU GPL3 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - bam: + type: file + description: A sorted BAM or sam file of aligned reads + pattern: "*.{bam,sam}" + - fasta: + type: file + description: A fasta file of the genome used for the mapping + pattern: "*.{fasta,fa}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bed: + type: file + description: a bed12 format file containing the final collapsed version of your transcriptome + pattern: "*.bed" + - bed_trans_reads: + type: file + description: This file uses bed12 format to show the transcript model for each read based on the mapping prior to collapsing. This only contains the reads which were accepted according to the defined thresholds. You can use this file to see if there were any strange occurrences during collapsing. It also contains the relationships between reads and collapsed transcript models. The 1st subfield in the 4th column shows the final transcript ID and the 2nd subfield in the 4th column shows the read ID. If you used no_cap mode for collapsing there may be multiple lines for a single read. This happens when a 5' degraded read can match to multiple 5' longer transcript models. + pattern: "*_trans_read.bed" + - local_density_error: + type: file + description: This file contains the log of filtering for local density error around the splice junctions ("-lde") + pattern: "*_local_density_error.txt" + - polya: + type: file + description: This file contains the reads with potential poly A truncation. + pattern: "*_polya.txt" + - read: + type: file + description: This file contains information for all mapped reads from the input SAM/BAM file. It shows both accepted and discarded reads and should match the number of mapped reads in your SAM/BAM file + pattern: "*_read.txt" + - strand_check: + type: file + description: This file shows instances where the sam flag strand information contrasted the GMAP strand information. + pattern: "*_strand_check.txt" + - trans_report: + type: file + description: This file contains collapsing information for each transcript. + pattern: "*_trans_report.txt" + - varcov: + type: file + description: This file contains the coverage information for each variant detected. + pattern: "*_varcov.txt" + - variants: + type: file + description: This file contains the variants called. Variants are only called if 5 or more reads show the variant at a specific locus. If you would like to change the threshold, please make an issue about this in the Github repo. 
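(Aside: the collapsed bed documented here is what the companion gstama/merge module, added in the next patch, consumes. A rough sketch of chaining the two in the style of the test workflows below; file names, the filelist and the grouping of collapsed beds are assumptions for illustration only:)

include { GSTAMA_COLLAPSE } from './modules/gstama/collapse/main' addParams( options: [ args: '-x capped -b BAM', suffix: '_tc' ] )
include { GSTAMA_MERGE    } from './modules/gstama/merge/main'    addParams( options: [ suffix: '_merged' ] )

workflow TAMA_SKETCH {
    input = [
        [ id:'sample' ],                 // meta map
        file('aligned.sorted.bam')       // hypothetical sorted BAM
    ]
    genome   = file('genome.fasta')      // mapping reference used to produce the BAM
    filelist = file('filelist.txt')      // TAMA merge filelist, prepared separately

    GSTAMA_COLLAPSE ( input, genome )

    // gather the collapsed beds into [ meta, [ beds ] ] as expected by the merge module
    GSTAMA_MERGE ( GSTAMA_COLLAPSE.out.bed.groupTuple(), filelist )
}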
+ pattern: "*_variants.txt" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index dfa00bd0..d1a8e7f4 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -490,6 +490,10 @@ graphmap2/index: - modules/graphmap2/index/** - tests/modules/graphmap2/index/** +gstama/collapse: + - modules/gstama/collapse/** + - tests/modules/gstama/collapse/** + gtdbtk/classifywf: - modules/gtdbtk/classifywf/** - tests/modules/gtdbtk/classifywf/** diff --git a/tests/modules/gstama/collapse/main.nf b/tests/modules/gstama/collapse/main.nf new file mode 100644 index 00000000..70b3c741 --- /dev/null +++ b/tests/modules/gstama/collapse/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GSTAMA_COLLAPSE } from '../../../../modules/gstama/collapse/main.nf' addParams( options: [ args:"-x capped -b BAM", suffix:'_tc' ] ) + +workflow test_gstama_collapse { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['pacbio']['aligned'], checkIfExists: true) + ] + genome = file(params.test_data['homo_sapiens']['genome']['genome2_fasta'], checkIfExists: true) + + GSTAMA_COLLAPSE ( input, genome ) +} diff --git a/tests/modules/gstama/collapse/test.yml b/tests/modules/gstama/collapse/test.yml new file mode 100644 index 00000000..98de6bb3 --- /dev/null +++ b/tests/modules/gstama/collapse/test.yml @@ -0,0 +1,22 @@ +- name: gstama collapse test_gstama_collapse + command: nextflow run tests/modules/gstama/collapse -entry test_gstama_collapse -c tests/config/nextflow.config + tags: + - gstama + - gstama/collapse + files: + - path: output/gstama/test_tc.bed + md5sum: e5105198ed970a33ae0ecaa7bff421d9 + - path: output/gstama/test_tc_local_density_error.txt + md5sum: b917ac1f14eccd590b6881a686f324d5 + - path: output/gstama/test_tc_polya.txt + md5sum: 628ea62b918fc4f31e109f724d714a66 + - path: output/gstama/test_tc_read.txt + md5sum: d2685d7f24cd1611e0770a5ce25422fe + - path: output/gstama/test_tc_strand_check.txt + md5sum: 42cc52b2660b1e0b84e1c9ab37a965ec + - path: output/gstama/test_tc_trans_read.bed + md5sum: 0ca1a32f33ef05242d897d913802554b + - path: output/gstama/test_tc_trans_report.txt + md5sum: 33a86c15ca2acce36b2a5962f4c1adc4 + - path: output/gstama/test_tc_variants.txt + md5sum: 5b1165e9f33faba4f7207013fc27257e From d3369789dafca9aaa7e2535727705872c13385ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Sat, 23 Oct 2021 19:00:39 +0100 Subject: [PATCH 159/314] New module: `gstama/merge` (#813) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: ignore test data * 👌 IMPROVE : add test bed files * 📦 NEW: Add gstama/merge module * 🐛 FIX: Change process label * 👌 IMPROVE: do not merge empty bed * 🐛 FIX: Change 0 lines files detection * 🐛 FIX: replace spaces by tab * 🐛 FIX: Remove tuple for report channel and add version output channel * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Update test * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Fix Typos * 👌 IMPROVE: Updates + clean code - Update to last versions.yml file - Better output 
channels - Update meta.yml * 👌 IMPROVE: Correct typo * 👌 IMPROVE: Remove included filelist creation and add an input channel * 🐛 FIX: Correct typo * 👌 IMPROVE: Add filelist file * 🐛 FIX: tama_merge.py emit a version number * Update modules/gstama/merge/meta.yml Co-authored-by: James A. Fellows Yates * 👌 IMPROVE: Update meta.yml * Update main.nf * Apply suggestions from code review Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel --- modules/gstama/merge/functions.nf | 78 +++++++++++++++++++++++++++++ modules/gstama/merge/main.nf | 46 +++++++++++++++++ modules/gstama/merge/meta.yml | 60 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 5 +- tests/modules/gstama/merge/main.nf | 19 +++++++ tests/modules/gstama/merge/test.yml | 14 ++++++ 7 files changed, 224 insertions(+), 2 deletions(-) create mode 100644 modules/gstama/merge/functions.nf create mode 100644 modules/gstama/merge/main.nf create mode 100644 modules/gstama/merge/meta.yml create mode 100644 tests/modules/gstama/merge/main.nf create mode 100644 tests/modules/gstama/merge/test.yml diff --git a/modules/gstama/merge/functions.nf b/modules/gstama/merge/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gstama/merge/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gstama/merge/main.nf b/modules/gstama/merge/main.nf new file mode 100644 index 00000000..37d685f6 --- /dev/null +++ b/modules/gstama/merge/main.nf @@ -0,0 +1,46 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GSTAMA_MERGE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gs-tama=1.0.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0" + } else { + container "quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0" + } + + input: + tuple val(meta), path(bed) + path filelist + + output: + tuple val(meta), path("*.bed") , emit: bed + tuple val(meta), path("*_gene_report.txt") , emit: gene_report + tuple val(meta), path("*_merge.txt") , emit: merge + tuple val(meta), path("*_trans_report.txt"), emit: trans_report + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + tama_merge.py \\ + -f $filelist \\ + -d merge_dup \\ + -p ${prefix} \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( tama_merge.py -version | head -n1 ) + END_VERSIONS + """ +} diff --git a/modules/gstama/merge/meta.yml b/modules/gstama/merge/meta.yml new file mode 100644 index 00000000..1351b864 --- /dev/null +++ b/modules/gstama/merge/meta.yml @@ -0,0 +1,60 @@ +name: gstama_merge +description: Merge multiple transcriptomes while maintaining source information. +keywords: + - gstama + - gstama/merge + - long-read + - isoseq + - nanopore + - tama + - trancriptome + - annotation +tools: + - gstama: + description: Gene-Switch Transcriptome Annotation by Modular Algorithms + homepage: https://github.com/sguizard/gs-tama + documentation: https://github.com/GenomeRIK/tama/wiki + tool_dev_url: https://github.com/sguizard/gs-tama + doi: "https://doi.org/10.1186/s12864-020-07123-7" + licence: ['GPL v3 License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bed: + type: file + description: bed12 file generated by TAMA collapse + pattern: "*.bed" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - bed: + type: file + description: This is the main merged annotation file. Transcripts are coloured according to the source support for each model. Sources are numbered based on the order supplied in the input filelist file. For example the first file named in the filelist file would have its transcripts coloured in red. 
If a transcript has multiple sources the colour is shown as magenta. + pattern: "*.bed" + - gene_report: + type: file + description: This contains a report of the genes from the merged file. "num_clusters" refers to the number of source transcripts that were used to make this gene model. "num_final_trans" refers to the number of transcripts in the final gene model. + pattern: "*_gene_report.txt" + - merge: + type: file + description: This contains a bed12 format file which shows the coordinates of each input transcript matched to the merged transcript ID. I used the "txt" extension even though it is a bed file just to avoid confusion with the main bed file. You can use this file to map the final merged transcript models to their pre-merged supporting transcripts. The 1st subfield in the 4th column shows the final merged transcript ID while the 2nd subfield shows the pre-merged transcript ID with source prefix. + pattern: "*_merge.txt" + - trans_report: + type: file + description: This contains the source information for each merged transcript. + pattern: "*_trans_report.txt" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d1a8e7f4..8ffc958b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -494,6 +494,10 @@ gstama/collapse: - modules/gstama/collapse/** - tests/modules/gstama/collapse/** +gstama/merge: + - modules/gstama/merge/** + - tests/modules/gstama/merge/** + gtdbtk/classifywf: - modules/gtdbtk/classifywf/** - tests/modules/gtdbtk/classifywf/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 1abae34d..6beba163 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -224,8 +224,9 @@ params { singletons = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.bam" aligned = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned.bam" alignedbai = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned.bam.bai" - genemodel1 = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned_tc.bed" - genemodel2 = "${test_data_dir}/genomics/homo_sapiens/pacbio/bam/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned_tc.2.bed" + genemodel1 = "${test_data_dir}/genomics/homo_sapiens/pacbio/bed/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned_tc.bed" + genemodel2 = "${test_data_dir}/genomics/homo_sapiens/pacbio/bed/alz.ccs.fl.NEB_5p--NEB_Clontech_3p.flnc.clustered.singletons.merged.aligned_tc.2.bed" + filelist = "${test_data_dir}/genomics/homo_sapiens/pacbio/txt/filelist.txt" } } } diff --git a/tests/modules/gstama/merge/main.nf b/tests/modules/gstama/merge/main.nf new file mode 100644 index 00000000..f9a8e05f --- /dev/null +++ b/tests/modules/gstama/merge/main.nf @@ -0,0 +1,19 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GSTAMA_MERGE } from '../../../../modules/gstama/merge/main' addParams( options: [suffix:'_merged'] ) + +workflow test_gstama_merge { + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['pacbio']['genemodel1'], checkIfExists: true), + 
file(params.test_data['homo_sapiens']['pacbio']['genemodel2'], checkIfExists: true) + ] + ] + filelist = file(params.test_data['homo_sapiens']['pacbio']['filelist'], checkIfExists: true) + + GSTAMA_MERGE ( input, filelist ) +} diff --git a/tests/modules/gstama/merge/test.yml b/tests/modules/gstama/merge/test.yml new file mode 100644 index 00000000..b98e35b6 --- /dev/null +++ b/tests/modules/gstama/merge/test.yml @@ -0,0 +1,14 @@ +- name: gstama merge test_gstama_merge + command: nextflow run tests/modules/gstama/merge -entry test_gstama_merge -c tests/config/nextflow.config + tags: + - gstama + - gstama/merge + files: + - path: output/gstama/test_merged.bed + md5sum: 60ec34e1ff9655d4ce2e83d3f4bbf448 + - path: output/gstama/test_merged_gene_report.txt + md5sum: 7029fd183dfd905a233403cfbe44722a + - path: output/gstama/test_merged_merge.txt + md5sum: 4279e59ed5739ce4f2f811568962893f + - path: output/gstama/test_merged_trans_report.txt + md5sum: 97d8346d9eb9da140941656c3a3325cd From 8524e6b40f9337196ef3e6cafd61a141704a2c2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Sat, 23 Oct 2021 19:09:41 +0100 Subject: [PATCH 160/314] Update of `pbccs` (#835) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: First commit of pbccs module * 👌 IMPROVE: Remove option from command + rename output (ccs -> bam) * 👌 IMPROVE: Move .pbi output into report channel * 🐛FIX: Correct code after --rq option removal from command line module - module main.nf: Remove ramaining rq input channel - Test main.nf: Transfert rq into addParams - Test test.yml: Update md5sums * 🐛FIX: Repair additionnal option usage * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: CCS is run in parallel with --chunk option * 👌 IMPROVE: Add Pbindex in bam ouput channel * 👌 IMPROVE: Change label to process_low * 👌 IMPROVE: Define reports files names + add json version of txt report * 🐛 FIX: Add missing backslashes * 🐛 FIX: Add missing gz extension * 🐛 FIX: update ouput channel * 🐛 FIX: output file name * 👌 IMPROVE: .gitignore * 👌 IMPROVE: Update function.nf to last version * 👌 IMPROVE: Update saveAs in main.nf * 👌 IMPROVE: Add pbccs module * 🐛 FIX: Fix Broken test * 👌 IMPROVE: Update test_data.config * 🐛 FIX: Fix test * 👌 IMPROVE: Update path of test dataset files * 👌 IMPROVE: Remove useless index + Fix Typos * 📦 NEW: First commit of pbccs module * 👌 IMPROVE: Remove option from command + rename output (ccs -> bam) * 👌 IMPROVE: Move .pbi output into report channel * 🐛FIX: Correct code after --rq option removal from command line module - module main.nf: Remove ramaining rq input channel - Test main.nf: Transfert rq into addParams - Test test.yml: Update md5sums * 🐛FIX: Repair additionnal option usage * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: CCS is run in parallel with --chunk option * 👌 IMPROVE: Add Pbindex in bam ouput channel * 👌 IMPROVE: Change label to process_low * 👌 IMPROVE: Define reports files names + add json version of txt report * 🐛 FIX: Add missing backslashes * 🐛 FIX: Add missing gz extension * 🐛 FIX: update ouput channel * 🐛 FIX: output file name * 👌 IMPROVE: .gitignore * 👌 IMPROVE: Update function.nf to last version * 👌 IMPROVE: Update saveAs in main.nf * 👌 IMPROVE: Add pbccs module * 🐛 FIX: Fix Broken test * 👌 IMPROVE: Update test_data.config * 🐛 FIX: Fix test * 👌 IMPROVE: Update path of test dataset files * 👌 IMPROVE: Remove useless index + Fix Typos * 🐛 
FIX: fill contains args * 👌 IMPROVE: One output => One Channel * 👌 IMPROVE: One input => One channel * 🐛 FIX: Update tests * 🐛 FIX: Remove TODOs from test.yaml * 👌 IMPROVE: Revert and keep bam and pbi together * 🐛 FIX: Remove old rq input from meta.yml * 👌 IMPROVE: Update test to match input channels * 👌 IMPROVE: use prefix for for output file name * 👌 IMPROVE: Update to new versions.yml * 👌 IMPROVE: Update pbccs from v6.0.0 to v6.0.2 * 👌 IMPROVE: Keep track of the former sample id in meta * Update modules/pbccs/main.nf Co-authored-by: Harshil Patel * 👌 IMPROVE: remove former_id from meta * 👌 IMPROVE: Use chunk number in output filename Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel --- modules/pbccs/main.nf | 31 ++++++++---------- modules/pbccs/meta.yml | 2 +- ...t_versions_yml.cpython-39-pytest-6.2.5.pyc | Bin 0 -> 3558 bytes tests/modules/pbccs/main.nf | 2 +- tests/modules/pbccs/test.yml | 20 +++++------ 5 files changed, 26 insertions(+), 29 deletions(-) create mode 100644 tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 49c47fda..7e70ac14 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -11,11 +11,11 @@ process PBCCS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::pbccs=6.0.0" : null) + conda (params.enable_conda ? "bioconda::pbccs=6.2.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pbccs:6.0.0--h9ee0642_2" + container "https://depot.galaxyproject.org/singularity/pbccs:6.2.0--h9ee0642_0" } else { - container "quay.io/biocontainers/pbccs:6.0.0--h9ee0642_2" + container "quay.io/biocontainers/pbccs:6.2.0--h9ee0642_0" } input: @@ -24,25 +24,22 @@ process PBCCS { val chunk_on output: - tuple val(meta), path("*.ccs.bam") , emit: bam - tuple val(meta), path("*.ccs.bam.pbi") , emit: pbi - tuple val(meta), path("*.ccs_report.txt" ) , emit: ccs_report_txt - tuple val(meta), path("*.ccs_report.json" ) , emit: ccs_report_json - tuple val(meta), path("*.zmw_metrics.json.gz"), emit: zmw_metrics - path "versions.yml" , emit: versions + tuple val(meta), path("*.chunk*.bam") , emit: bam + tuple val(meta), path("*.chunk*.bam.pbi") , emit: pbi + tuple val(meta), path("*.report.txt" ) , emit: report_txt + tuple val(meta), path("*.report.json" ) , emit: report_json + tuple val(meta), path("*.metrics.json.gz"), emit: metrics + path "versions.yml" , emit: versions script: - def ccs = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs.bam' - def report_txt = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs_report.txt' - def report_json = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.ccs_report.json' - def zmw_metrics = bam.toString().replaceAll(/bam$/, '') + chunk_num + '.zmw_metrics.json.gz' + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" """ ccs \\ $bam \\ - $ccs \\ - --report-file $report_txt \\ - --report-json $report_json \\ - --metrics-json $zmw_metrics \\ + ${prefix}.chunk${chunk_num}.bam \\ + --report-file ${prefix}.report.txt \\ + --report-json ${prefix}.report.json \\ + --metrics-json ${prefix}.metrics.json.gz \\ --chunk $chunk_num/$chunk_on \\ -j $task.cpus \\ $options.args diff --git a/modules/pbccs/meta.yml b/modules/pbccs/meta.yml index ef0899a1..38f31496 100644 --- a/modules/pbccs/meta.yml +++ b/modules/pbccs/meta.yml @@ -16,7 +16,7 @@ input: type: map description: | Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] + id: id of the split file - bam: type: file description: Raw subreads bam diff --git a/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc b/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33acb8369a1bc62b5e66e1ed80e2247dd0e2759f GIT binary patch literal 3558 zcmaJ@TaVku73T0FQCdkWt=50fJ2%8xAU@;c_*j71tCg z&y04xD)|!Up)V=?+}g;h4+RSJp}(O&gXmj<0(mOXry?kto--7;v;&k9hjTgS%y%wx z=6LmbRfFfZUyVJ_*0jIjp!gUtco$v~KtnZ30msOtx zcHdT1m3~EitNki`jbLe9>(_wBq}G>Ozs~I4#_lq!Kh=Jt(Go3xsXZnACMEZ^j{Obn zsBLygYW~cVBlwmmqb!sIoX;N%I{4zG*n~!Ek~}55`k^t`5-rvaO-kk@Au+k9{qjxbpJ20feVU+IYv8?e?$%-E#Hn)VR1dVK5g8EOIwYxea|m~^}#a6)g)gcA+b zU_auIhC#G1oc)nM7&#+vk2#(rnULLsap3r&BS%b)g+qPL1~TFY-D=f==RxGq$Pv*c z2j-_-_K-ar?v6pkA zwLXeG%6Mub?#)hx>tLPKc3l+ex@ieSf{?V#nTUctmYO?3w4Is<-Z)6j0GQJXXC8GW zdm_{7d!AsK#EzAkBgR?(*yI5Bb8}lL$s#4g{*sypm_vUjwe$5qYPCkha|&Rsf(} z1YxX+8Z?^KAXQ@PtHdU}4dVg9*~N1VzDw}F53l&hz{rUq`Y>j8$)S!>L-jRH#SxjE zi6MGp@;iv!7shEV(IJLRh%n<*Equ#_SUUKeK->^bmSmY)u@01_7sgYF#V^P&v`}By zXc;3cHugwj#g<5=eORG(Yz+ym;QUaZn=n_6P1w0Amkw)lZBG-whq-!e%nARKT7_Py z(<)tht{>Nu63jJd4LIs2G}?$uK7rXXps){@6Z%*2w-!xl_&DfsB3@E@)E0VxerQLfhXuM+lZtH1)!05f zHV2=9CV(ieqeijyOc(DdX(vJ28b~Xm|0k^E=>Lrvj?kYvLa&RHNEO!sz0S`g74`xB zDb)w`^z?H>SwFLLDaQx<*AI`+HOMFb`b@Vuec(Dr&mU14LPx528K?|iQ}Un>RGHS`SvA)N*>5~AgT|{1kF4ezgEyM+w%}b! z+RyaIn^c3m`Q~OyT#n~$R*t4}GL^kqy_X+fOp&u~Oohc~tGK-7gpqWH(G-&3BAI4# zirKWtjs1-$6G*fS)1@5UcD82!*vc|)k?_ID7tX*7vz$Gi2GXAd3{tTtAhX<7Nwr9S z(Rr}QYCg>S(f z0T~~yAf(5ovk%!ZyHuhmyN$jtv|roUyyrgL_~ibFA8+38kZ()K?}r^SJN|WNX3Dpf5I(`sb z2IGu9iT?QM#wQ#1K7N1Jtlss6V4w|de*?CMoo;-CaROlVJ9hx+`T|+?SBs8wM^UeL z#08uI9Xi#-xs%P_L>U1KQ#%LMF;VC`KLJAdNoa6*bJka0j3$=@2G=d3%o)^$J)R1w zE>;ipV^6$+gey4;nFDtuKe(+7KjG1U39umg%ENnoAe~3-;1#uQWoq4;y$xGJcqq{j z44!lX2r{X}E;e`A*hdR>XWf@f1D^T+`ll@s4dp)E;YXKZ$KbD_&PR&RN}qp5-|k+V zwI62prV~!b+l<4p;?C@J2CNWV-eSG$%=k-au7K?`IM`eG<7z$!o&UM03mzYgs)=&d z^^UltdjBXLU1@A7C!o;5_1K`04o5_5YN<)4!|Mzp$$0FGiF6 z8dmuR&~{p>ojaWOU?N>}vuNpNn81Bq&h|MtCaoMzVR5bI_$A~i Date: Sat, 23 Oct 2021 19:30:14 +0100 Subject: [PATCH 161/314] Createsomaticpanelofnormals (#859) * files created for createsompon, script written, meta written, still needs tests * updated to 2.0.0 method input, however this requires a genomicsDB input now * script finished, meta yaml updated. 
Tests working locally, test yaml made, needs genomicsdb example on nf-core to run repository tests * versions updated, issue with test data not able to download directory * updated tests to include repo-side data * Apply suggestions from code review * Update modules/gatk4/createsomaticpanelofnormals/main.nf Co-authored-by: GCJMackenzie Co-authored-by: Harshil Patel --- .../createsomaticpanelofnormals/functions.nf | 78 +++++++++++++++++++ .../gatk4/createsomaticpanelofnormals/main.nf | 47 +++++++++++ .../createsomaticpanelofnormals/meta.yml | 55 +++++++++++++ tests/config/pytest_modules.yml | 4 + .../gatk4/createsomaticpanelofnormals/main.nf | 72 +++++++++++++++++ .../createsomaticpanelofnormals/test.yml | 9 +++ 6 files changed, 265 insertions(+) create mode 100644 modules/gatk4/createsomaticpanelofnormals/functions.nf create mode 100644 modules/gatk4/createsomaticpanelofnormals/main.nf create mode 100644 modules/gatk4/createsomaticpanelofnormals/meta.yml create mode 100644 tests/modules/gatk4/createsomaticpanelofnormals/main.nf create mode 100644 tests/modules/gatk4/createsomaticpanelofnormals/test.yml diff --git a/modules/gatk4/createsomaticpanelofnormals/functions.nf b/modules/gatk4/createsomaticpanelofnormals/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/createsomaticpanelofnormals/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf new file mode 100644 index 00000000..66dfda23 --- /dev/null +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_CREATESOMATICPANELOFNORMALS { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(genomicsdb) + path fasta + path fastaidx + path dict + + output: + tuple val(meta), path("*.vcf.gz"), emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + gatk \\ + CreateSomaticPanelOfNormals \\ + -R $fasta \\ + -V gendb://$genomicsdb \\ + -O ${prefix}.vcf.gz \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/createsomaticpanelofnormals/meta.yml b/modules/gatk4/createsomaticpanelofnormals/meta.yml new file mode 100644 index 00000000..f0199ed6 --- /dev/null +++ b/modules/gatk4/createsomaticpanelofnormals/meta.yml @@ -0,0 +1,55 @@ +name: gatk4_createsomaticpanelofnormals +description: Create a panel of normals contraining germline and artifactual sites for use with mutect2. +keywords: + - gatk4 + - createsomaticpanelofnormals + - panelofnormals +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test'] + - genoomicsdb: + type: directory + description: genomicsDB workspace that contains the samples to create the somatic panel of normals with. 
+ pattern: "*_genomicsDBworkspace" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fastaidx: + type: file + description: Index of reference fasta file + pattern: "fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + +output: + - vcf: + type: file + description: panel of normal as compressed vcf file + pattern: "*.vcf.gz" + - tbi: + type: file + description: Index of vcf file + pattern: "*vcf.gz.tbi" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8ffc958b..fac11d0e 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -406,6 +406,10 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** +gatk4/createsomaticpanelofnormals: + - modules/gatk4/createsomaticpanelofnormals/** + - tests/modules/gatk4/createsomaticpanelofnormals/** + gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf new file mode 100644 index 00000000..34fc9847 --- /dev/null +++ b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -0,0 +1,72 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' addParams( options: [suffix:'.pon'] ) + +workflow test_gatk4_createsomaticpanelofnormals { + maindir = file('test_genomicsdb') + subdir1 = file('test_genomicsdb/chr22$1$40001') + subdir2 = file('test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448') + subdir3 = file('test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir') + subdir2.mkdirs() + subdir3.mkdirs() + + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/__tiledb_workspace.tdb' , checkIfExists: true).copyTo(maindir) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/vcfheader.vcf' , checkIfExists: true).copyTo(maindir) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/vidmap.json' , checkIfExists: true).copyTo(maindir) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/callset.json' , checkIfExists: true).copyTo(maindir) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/.__consolidation_lock' , checkIfExists: true).copyTo(subdir1) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__array_schema.tdb' , checkIfExists: true).copyTo(subdir1) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json' , checkIfExists: true).copyTo(subdir3) + file( 
'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_meta_2b25a6c2-cb94-4a4a-9005-acb7c595d322.json' , checkIfExists: true).copyTo(subdir3) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/AD.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/AD_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ALT.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ALT_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/BaseQRankSum.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/DB.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/DP.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/DP_FORMAT.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/END.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ExcessHet.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/FILTER.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/FILTER_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 
'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/GQ.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/GT.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/GT_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ID.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ID_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/InbreedingCoeff.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MIN_DP.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MLEAC.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MLEAC_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MLEAF.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MLEAF_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MQRankSum.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PGT.tdb' , checkIfExists: true).copyTo(subdir2) + file( 
'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PGT_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PID.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PID_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PL.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PL_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PS.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/QUAL.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/RAW_MQandDP.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/REF.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/REF_var.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ReadPosRankSum.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/SB.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/__book_keeping.tdb.gz' , checkIfExists: true).copyTo(subdir2) + file( 
'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/__coords.tdb' , checkIfExists: true).copyTo(subdir2) + file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/__tiledb_fragment.tdb' , checkIfExists: true).copyTo(subdir2) + + input = [ [ id:'test' ], // meta map + file( maindir , checkIfExists: true)] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_CREATESOMATICPANELOFNORMALS ( input, fasta, fastaidx, dict ) +} diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml new file mode 100644 index 00000000..d3e6c537 --- /dev/null +++ b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml @@ -0,0 +1,9 @@ +- name: gatk4 createsomaticpanelofnormals test_gatk4_createsomaticpanelofnormals + command: nextflow run tests/modules/gatk4/createsomaticpanelofnormals -entry test_gatk4_createsomaticpanelofnormals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/createsomaticpanelofnormals + files: + - path: output/gatk4/test.pon.vcf.gz + - path: output/gatk4/test.pon.vcf.gz.tbi + md5sum: d88d2b745c9226ddf284e3494db8b9d2 From 0a5ddd0ad0f3035253fab569caa631fa0a81db9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Sat, 23 Oct 2021 19:39:27 +0100 Subject: [PATCH 162/314] Update module: `bamtools/split` (#885) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: Ignore test data * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add bamtools module * 👌 IMPROVE: Ignore test data * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Update module to last template version * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update with new versions.yml file * 🐛 FIX: Update meta.yml + correct typos * Update modules/bamtools/split/meta.yml Add bam, split, chunk tags Co-authored-by: James A. Fellows Yates * 🐛 FIX: Correct meta.yml * 🐛 FIX: Grab software name using ${getSoftwareName(task.process)} * 🐛 FIX: Update test.yml Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/bamtools/split/main.nf | 2 +- tests/modules/bamtools/split/test.yml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 506a957c..86eaa5d6 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -35,7 +35,7 @@ process BAMTOOLS_SPLIT { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - bamtools: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) + ${getSoftwareName(task.process)}: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) END_VERSIONS """ } diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml index f92f9345..f28a9bcf 100644 --- a/tests/modules/bamtools/split/test.yml +++ b/tests/modules/bamtools/split/test.yml @@ -1,10 +1,10 @@ - name: bamtools split test_bamtools_split command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split -c tests/config/nextflow.config tags: - - bamtools - bamtools/split + - bamtools files: - - path: output/bamtools/test.paired_end.sorted.REF_chr22:16570000-16610000.bam - md5sum: 256535b9a3ab5864be0f7dea2218d159 + - path: output/bamtools/test.paired_end.sorted.REF_chr22.bam + md5sum: b7dc50e0edf9c6bfc2e3b0e6d074dc07 - path: output/bamtools/test.paired_end.sorted.REF_unmapped.bam - md5sum: 568e058d871d8bc319330360bcae4e43 + md5sum: e0754bf72c51543b2d745d96537035fb From 2ad98162f355671061f457c7c79db45c13478086 Mon Sep 17 00:00:00 2001 From: Gregor Sturm Date: Sun, 24 Oct 2021 20:51:56 +0200 Subject: [PATCH 163/314] Notebook modules (#617) * Draft rmarkdown module * stub jupyter notebook module * Create yaml file with params * Update meta.yml for rmarkdown module * Add comment on YAML * Update notebooks module, clean up parametrize.nf * Two separate channels for parameters and input files * Fix Rmd render script * Add tests for rmarkdown * Fix tests for rmarkdown module * Update checksums * Fix tests for jupyter * Test without Grab() * Update software versions * update rmarkdown dependencies * Draft for multiple versions * Fix indent of script * Fix indent in rmarkdown script * Emit version.syml * Update modules/rmarkdown/main.nf Co-authored-by: James A. Fellows Yates * Update modules/rmarkdown/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/rmarkdown/meta.yml Co-authored-by: James A. Fellows Yates * Rename rmarkdown to rmarkdownnotebook * Add rmarkdown mulled biocontainer * Write sessionInfo to separate log file * Update rmarkdownnotebook * Sessioninfo does not have a stable md5sum * Update jupyternotebook * Update meta * Add jupyternotebook biocontainers * Handle Groovy Gstrings in parameterize * Update to versions.yml * Update functions.nf * Fix versions yaml * Fix EC lint * Update modules/rmarkdownnotebook/main.nf Co-authored-by: James A. Fellows Yates * Update modules/jupyternotebook/main.nf Co-authored-by: James A. Fellows Yates * Use official test data * Harshilify * Make parameters channel clearer * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Update main.nf Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/jupyternotebook/functions.nf | 78 +++++++++++++++++++ modules/jupyternotebook/main.nf | 92 ++++++++++++++++++++++ modules/jupyternotebook/meta.yml | 68 +++++++++++++++++ modules/jupyternotebook/parametrize.nf | 44 +++++++++++ modules/rmarkdownnotebook/functions.nf | 78 +++++++++++++++++++ modules/rmarkdownnotebook/main.nf | 97 ++++++++++++++++++++++++ modules/rmarkdownnotebook/meta.yml | 73 ++++++++++++++++++ modules/rmarkdownnotebook/parametrize.nf | 36 +++++++++ tests/config/pytest_modules.yml | 8 ++ tests/config/test_data.config | 10 +++ tests/modules/jupyternotebook/main.nf | 49 ++++++++++++ tests/modules/jupyternotebook/test.yml | 30 ++++++++ tests/modules/rmarkdownnotebook/main.nf | 33 ++++++++ tests/modules/rmarkdownnotebook/test.yml | 27 +++++++ 14 files changed, 723 insertions(+) create mode 100644 modules/jupyternotebook/functions.nf create mode 100644 modules/jupyternotebook/main.nf create mode 100644 modules/jupyternotebook/meta.yml create mode 100644 modules/jupyternotebook/parametrize.nf create mode 100644 modules/rmarkdownnotebook/functions.nf create mode 100644 modules/rmarkdownnotebook/main.nf create mode 100644 modules/rmarkdownnotebook/meta.yml create mode 100644 modules/rmarkdownnotebook/parametrize.nf create mode 100644 tests/modules/jupyternotebook/main.nf create mode 100644 tests/modules/jupyternotebook/test.yml create mode 100644 tests/modules/rmarkdownnotebook/main.nf create mode 100644 tests/modules/rmarkdownnotebook/test.yml diff --git a/modules/jupyternotebook/functions.nf b/modules/jupyternotebook/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/jupyternotebook/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/jupyternotebook/main.nf b/modules/jupyternotebook/main.nf new file mode 100644 index 00000000..2d8ad92f --- /dev/null +++ b/modules/jupyternotebook/main.nf @@ -0,0 +1,92 @@ +// Import generic module functions +include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' +include { dump_params_yml; indent_code_block } from "./parametrize" + +params.options = [:] +options = initOptions(params.options) +params.parametrize = true +params.implicit_params = true +params.meta_params = true + +process JUPYTERNOTEBOOK { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + //NB: You likely want to override this with a container containing all required + //dependencies for your analysis. The container at least needs to contain the + //ipykernel, jupytext, papermill and nbconvert Python packages. + conda (params.enable_conda ? "ipykernel=6.0.3 jupytext=1.11.4 nbconvert=6.1.0 papermill=2.3.3 matplotlib=3.4.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963%3A879972fc8bdc81ee92f2bce3b4805d89a772bf84-0" + } else { + container "quay.io/biocontainers/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963:879972fc8bdc81ee92f2bce3b4805d89a772bf84-0" + } + + input: + tuple val(meta), path(notebook) + val parameters + path input_files + + output: + tuple val(meta), path("*.html"), emit: report + tuple val(meta), path("artifacts/"), emit: artifacts, optional: true + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + // Dump parameters to yaml file. + // Using a yaml file over using the CLI params because + // * no issue with escaping + // * allows to pass nested maps instead of just single values + def params_cmd = "" + def render_cmd = "" + if (params.parametrize) { + nb_params = [:] + if (params.implicit_params) { + nb_params["cpus"] = task.cpus + nb_params["artifact_dir"] = "artifacts" + nb_params["input_dir"] = "./" + } + if (params.meta_params) { + nb_params["meta"] = meta + } + nb_params += parameters + params_cmd = dump_params_yml(nb_params) + render_cmd = "papermill -f .params.yml" + } else { + render_cmd = "papermill" + } + + """ + set -o pipefail + + # Dump .params.yml heredoc (section will be empty if parametrization is disabled) + ${indent_code_block(params_cmd, 4)} + + # Create output directory + mkdir artifacts + + # Set parallelism for BLAS/MKL etc. 
to avoid over-booking of resources + export MKL_NUM_THREADS="${task.cpus}" + export OPENBLAS_NUM_THREADS="${task.cpus}" + export OMP_NUM_THREADS="${task.cpus}" + export NUMBA_NUM_THREADS="${task.cpus}" + + # Convert notebook to ipynb using jupytext, execute using papermill, convert using nbconvert + jupytext --to notebook --output - --set-kernel - ${notebook} \\ + | ${render_cmd} \\ + | jupyter nbconvert --stdin --to html --output ${prefix}.html + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + jupytext: \$(jupytext --version) + ipykernel: \$(python -c "import ipykernel; print(ipykernel.__version__)") + nbconvert: \$(jupyter nbconvert --version) + papermill: \$(papermill --version | cut -f1 -d' ') + END_VERSIONS + """ +} diff --git a/modules/jupyternotebook/meta.yml b/modules/jupyternotebook/meta.yml new file mode 100644 index 00000000..3a1b61e1 --- /dev/null +++ b/modules/jupyternotebook/meta.yml @@ -0,0 +1,68 @@ +name: jupyternotebook +description: | + Render jupyter (or jupytext) notebooks to HTML reports. Supports parametrization + through papermill. +keywords: + - Python + - Jupyter + - jupytext + - papermill + - notebook + - reports +tools: + - jupytext: + description: Jupyter notebooks as plain text scripts or markdown documents + homepage: https://github.com/mwouts/jupytext/ + documentation: https://jupyter.org/documentation + tool_dev_url: https://github.com/mwouts/jupytext/ + licence: "MIT" + - papermill: + description: Parameterize, execute, and analyze notebooks + homepage: https://github.com/nteract/papermill + documentation: http://papermill.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/nteract/papermill + licence: "BSD 3-clause" + - nbconvert: + description: Parameterize, execute, and analyze notebooks + homepage: https://nbconvert.readthedocs.io/en/latest/ + documentation: https://nbconvert.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/jupyter/nbconvert + licence: "BSD 3-clause" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - notebook: + type: file + description: Jupyter notebook or jupytext representation thereof + pattern: "*.{ipynb,py,md,Rmd,myst}" + - parameters: + type: map + description: | + Groovy map with notebook parameters which will be passed + to papermill in order to create parametrized reports. + - input_files: + type: path + description: One or multiple files serving as input data for the notebook. + pattern: "*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - report: + type: file + description: HTML report generated from Jupyter notebook + pattern: "*.html" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@grst" diff --git a/modules/jupyternotebook/parametrize.nf b/modules/jupyternotebook/parametrize.nf new file mode 100644 index 00000000..bd74af27 --- /dev/null +++ b/modules/jupyternotebook/parametrize.nf @@ -0,0 +1,44 @@ +import org.yaml.snakeyaml.Yaml +import org.yaml.snakeyaml.representer.Representer +import org.yaml.snakeyaml.DumperOptions + + +/** + * Multiline code blocks need to have the same indentation level + * as the `script:` section. This function re-indents code to the specified level. 
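+ *
+ * For illustration only (hypothetical input, not part of the module API):
+ *   indent_code_block("echo a\necho b", 4)
+ * returns "echo a" followed by a newline and "    echo b", i.e. only lines after the
+ * first receive the requested indent; the first line reuses whatever indentation is
+ * already present at the insertion point in the script block.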
+ */ +def indent_code_block(code, n_spaces) { + def indent_str = " ".multiply(n_spaces) + return code.stripIndent().split("\n").join("\n" + indent_str) +} + +/** + * Create a config YAML file from a groovy map + * + * @params task The process' `task` variable + * @returns a line to be inserted in the bash script. + */ +def dump_params_yml(params) { + DumperOptions options = new DumperOptions(); + options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK); + + // Properly handle Groovy GStrings + // see https://stackoverflow.com/a/35108062/2340703 + def representer = new Representer() {{ + this.multiRepresenters.put(GString, this.representers.get(String)) + }} + + def yaml = new Yaml(representer, options) + def yaml_str = yaml.dump(params) + + // Writing the .params.yml file directly as follows does not work. + // It only works in 'exec:', but not if there is a `script:` section: + // task.workDir.resolve('.params.yml').text = yaml_str + + // Therefore, we inject it into the bash script: + return """\ + cat <<"END_PARAMS_SECTION" > ./.params.yml + ${indent_code_block(yaml_str, 8)} + END_PARAMS_SECTION + """ +} diff --git a/modules/rmarkdownnotebook/functions.nf b/modules/rmarkdownnotebook/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/rmarkdownnotebook/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/rmarkdownnotebook/main.nf b/modules/rmarkdownnotebook/main.nf new file mode 100644 index 00000000..4bded58c --- /dev/null +++ b/modules/rmarkdownnotebook/main.nf @@ -0,0 +1,97 @@ +// Import generic module functions +include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' +include { dump_params_yml; indent_code_block } from "./parametrize" + +params.options = [:] +options = initOptions(params.options) +params.parametrize = true +params.implicit_params = true +params.meta_params = true + +process RMARKDOWNNOTEBOOK { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + //NB: You likely want to override this with a container containing all required + //dependencies for your analysis. The container at least needs to contain the + //yaml and rmarkdown R packages. + conda (params.enable_conda ? "r-base=4.1.0 r-rmarkdown=2.9 r-yaml=2.2.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5%3A0e852a1e4063fdcbe3f254ac2c7469747a60e361-0" + } else { + container "quay.io/biocontainers/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5:0e852a1e4063fdcbe3f254ac2c7469747a60e361-0" + } + + input: + tuple val(meta), path(notebook) + val parameters + path input_files + + output: + tuple val(meta), path("*.html") , emit: report + tuple val(meta), path ("artifacts/*") , emit: artifacts, optional: true + tuple val(meta), path ("session_info.log"), emit: session_info + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + // Dump parameters to yaml file. + // Using a yaml file over using the CLI params because + // * no issue with escaping + // * allows to pass nested maps instead of just single values + def params_cmd = "" + def render_cmd = "" + if (params.parametrize) { + nb_params = [:] + if (params.implicit_params) { + nb_params["cpus"] = task.cpus + nb_params["artifact_dir"] = "artifacts" + nb_params["input_dir"] = "./" + } + if (params.meta_params) { + nb_params["meta"] = meta + } + nb_params += parameters + params_cmd = dump_params_yml(nb_params) + render_cmd = """\ + params = yaml::read_yaml('.params.yml') + rmarkdown::render('${prefix}.Rmd', params=params, envir=new.env()) + """ + } else { + render_cmd = "rmarkdown::render('${prefix}.Rmd')" + } + + """ + # Dump .params.yml heredoc (section will be empty if parametrization is disabled) + ${indent_code_block(params_cmd, 4)} + + # Create output directory + mkdir artifacts + + # Set parallelism for BLAS/MKL etc. 
to avoid over-booking of resources + export MKL_NUM_THREADS="${task.cpus}" + export OPENBLAS_NUM_THREADS="${task.cpus}" + export OMP_NUM_THREADS="${task.cpus}" + + # Work around https://github.com/rstudio/rmarkdown/issues/1508 + # If the symbolic link is not replaced by a physical file + # output- and temporary files will be written to the original directory. + mv "${notebook}" "${notebook}.orig" + cp -L "${notebook}.orig" "${prefix}.Rmd" + + # Render notebook + Rscript - < versions.yml + ${getProcessName(task.process)}: + rmarkdown: \$(Rscript -e "cat(paste(packageVersion('rmarkdown'), collapse='.'))") + END_VERSIONS + """ +} diff --git a/modules/rmarkdownnotebook/meta.yml b/modules/rmarkdownnotebook/meta.yml new file mode 100644 index 00000000..8d0f9d28 --- /dev/null +++ b/modules/rmarkdownnotebook/meta.yml @@ -0,0 +1,73 @@ +name: rmarkdownnotebook +description: Render an rmarkdown notebook. Supports parametrization. +keywords: + - R + - notebook + - reports +tools: + - rmarkdown: + description: Dynamic Documents for R + homepage: https://rmarkdown.rstudio.com/ + documentation: https://rmarkdown.rstudio.com/lesson-1.html + tool_dev_url: https://github.com/rstudio/rmarkdown + doi: "" + licence: GPL-3 + +params: + - parametrize: + type: boolean + description: If true, parametrize the notebook + - implicit_params: + type: boolean + description: | + If true (default), include the implicit params + * `input_dir`, which points to the directory containing the files added via `input_files`, + * `artifact_dir`, which points to the directory where the notebook should place output files, and + * `cpus`, which contains the value of ${task.cpus} + - meta_params: + type: boolean + description: | + If true, include a parameter `meta` which contains the information specified + via the `meta` input channel. + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - notebook: + type: file + description: Rmarkdown file + pattern: "*.{Rmd}" + - parameters: + type: map + description: | + Groovy map with notebook parameters which will be passed to + rmarkdown to generate parametrized reports. + - input_files: + type: path + description: One or multiple files serving as input data for the notebook. + pattern: "*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - report: + type: file + description: HTML report generated from Rmarkdown + pattern: "*.html" + - session_info: + type: file + description: dump of R SessionInfo + pattern: "*.log" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@grst" diff --git a/modules/rmarkdownnotebook/parametrize.nf b/modules/rmarkdownnotebook/parametrize.nf new file mode 100644 index 00000000..05e259eb --- /dev/null +++ b/modules/rmarkdownnotebook/parametrize.nf @@ -0,0 +1,36 @@ +import org.yaml.snakeyaml.Yaml +import org.yaml.snakeyaml.DumperOptions + + +/** + * Multiline code blocks need to have the same indentation level + * as the `script:` section. This function re-indents code to the specified level. + */ +def indent_code_block(code, n_spaces) { + def indent_str = " ".multiply(n_spaces) + return code.stripIndent().split("\n").join("\n" + indent_str) +} + +/** + * Create a config YAML file from a groovy map + * + * @params task The process' `task` variable + * @returns a line to be inserted in the bash script. 
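+ *
+ * A rough sketch of the expected behaviour (the map below mirrors the values used in the
+ * module tests and is illustrative only):
+ *   dump_params_yml([input_filename: "hello.txt", n_iter: 12])
+ * returns a bash heredoc that writes ./.params.yml with the block-style YAML entries
+ * "input_filename: hello.txt" and "n_iter: 12", which the R code then loads via
+ * yaml::read_yaml('.params.yml') before calling rmarkdown::render().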
+ */ +def dump_params_yml(params) { + DumperOptions options = new DumperOptions(); + options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK); + def yaml = new Yaml(options) + def yaml_str = yaml.dump(params) + + // Writing the .params.yml file directly as follows does not work. + // It only works in 'exec:', but not if there is a `script:` section: + // task.workDir.resolve('.params.yml').text = yaml_str + + // Therefore, we inject it into the bash script: + return """\ + cat <<"END_PARAMS_SECTION" > ./.params.yml + ${indent_code_block(yaml_str, 8)} + END_PARAMS_SECTION + """ +} diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index fac11d0e..4e35fa24 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -585,6 +585,10 @@ ivar/variants: - modules/ivar/variants/** - tests/modules/ivar/variants/** +jupyternotebook: + - modules/jupyternotebook/** + - tests/modules/jupyternotebook/** + kallisto/index: - modules/kallisto/index/** - tests/modules/kallisto/index/** @@ -871,6 +875,10 @@ raxmlng: - modules/raxmlng/** - tests/modules/raxmlng/** +rmarkdownnotebook: + - modules/rmarkdownnotebook/** + - tests/modules/rmarkdownnotebook/** + roary: - modules/roary/** - tests/modules/roary/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 6beba163..d7eda458 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -229,5 +229,15 @@ params { filelist = "${test_data_dir}/genomics/homo_sapiens/pacbio/txt/filelist.txt" } } + 'generic' { + 'notebooks' { + rmarkdown = "${test_data_dir}/generic/notebooks/rmarkdown/rmarkdown_notebook.Rmd" + ipython_md = "${test_data_dir}/generic/notebooks/jupyter/ipython_notebook.md" + ipython_ipynb = "${test_data_dir}/generic/notebooks/jupyter/ipython_notebook.ipynb" + } + 'txt' { + hello = "${test_data_dir}/generic/txt/hello.txt" + } + } } } diff --git a/tests/modules/jupyternotebook/main.nf b/tests/modules/jupyternotebook/main.nf new file mode 100644 index 00000000..c1da7e11 --- /dev/null +++ b/tests/modules/jupyternotebook/main.nf @@ -0,0 +1,49 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { JUPYTERNOTEBOOK } from '../../../modules/jupyternotebook/main.nf' addParams( + parametrize: false, options: [:] +) +include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE } from '../../../modules/jupyternotebook/main.nf' addParams( + options: [:] +) +include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB } from '../../../modules/jupyternotebook/main.nf' addParams( + options: [:] +) + +workflow test_jupyternotebook { + + input = [ [ id:'test_jupyter' ], // meta map + file(params.test_data['generic']['notebooks']['ipython_md'], checkIfExists: true) ] + + JUPYTERNOTEBOOK ( input, [:], []) + +} + +workflow test_jupyternotebook_parametrize { + + input = [ [ id:'test_jupyter' ], // meta map + file(params.test_data['generic']['notebooks']['ipython_md'], checkIfExists: true) ] + + JUPYTERNOTEBOOK_PARAMETRIZE( + input, + [input_filename: "hello.txt", n_iter: 12], + file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + ) + +} + +workflow test_jupyternotebook_parametrize_ipynb { + + input = [ [ id:'test_jupyter' ], // meta map + file(params.test_data['generic']['notebooks']['ipython_ipynb'], checkIfExists: true) ] + + JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB( + input, + [input_filename: "hello.txt", n_iter: 12], + file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + ) + +} + diff --git 
a/tests/modules/jupyternotebook/test.yml b/tests/modules/jupyternotebook/test.yml new file mode 100644 index 00000000..dd4f1175 --- /dev/null +++ b/tests/modules/jupyternotebook/test.yml @@ -0,0 +1,30 @@ +- name: jupyternotebook test_jupyternotebook + command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook -c tests/config/nextflow.config + tags: + - jupyternotebook + files: + - path: output/jupyternotebook/test_jupyter.html + contains: + - "n_iter = 10" + +- name: jupyternotebook test_jupyternotebook_parametrize + command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize -c tests/config/nextflow.config + tags: + - jupyternotebook + files: + - path: output/jupyternotebook/artifacts/artifact.txt + md5sum: 8ddd8be4b179a529afa5f2ffae4b9858 + - path: output/jupyternotebook/test_jupyter.html + contains: + - "n_iter = 12" + +- name: jupyternotebook test_jupyternotebook_parametrize_ipynb + command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize_ipynb -c tests/config/nextflow.config + tags: + - jupyternotebook + files: + - path: output/jupyternotebook/artifacts/artifact.txt + md5sum: 8ddd8be4b179a529afa5f2ffae4b9858 + - path: output/jupyternotebook/test_jupyter.html + contains: + - "n_iter = 12" diff --git a/tests/modules/rmarkdownnotebook/main.nf b/tests/modules/rmarkdownnotebook/main.nf new file mode 100644 index 00000000..e56d54ff --- /dev/null +++ b/tests/modules/rmarkdownnotebook/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { RMARKDOWNNOTEBOOK } from '../../../modules/rmarkdownnotebook/main.nf' addParams( + parametrize: false, options: [:] +) +include { RMARKDOWNNOTEBOOK as RMARKDOWNNOTEBOOK_PARAMETRIZE } from '../../../modules/rmarkdownnotebook/main.nf' addParams( + options: [:] +) + +workflow test_rmarkdown { + + input = [ [ id:'test_rmd' ], // meta map + file(params.test_data['generic']['notebooks']['rmarkdown'], checkIfExists: true) ] + + RMARKDOWNNOTEBOOK ( input, [:], []) + +} + +workflow test_rmarkdown_parametrize { + + input = [ [ id:'test_rmd' ], // meta map + file(params.test_data['generic']['notebooks']['rmarkdown'], checkIfExists: true) ] + + RMARKDOWNNOTEBOOK_PARAMETRIZE( + input, + [input_filename: "hello.txt", n_iter: 12], + file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + ) + +} + diff --git a/tests/modules/rmarkdownnotebook/test.yml b/tests/modules/rmarkdownnotebook/test.yml new file mode 100644 index 00000000..bef6086a --- /dev/null +++ b/tests/modules/rmarkdownnotebook/test.yml @@ -0,0 +1,27 @@ +- name: rmarkdownnotebook test_rmarkdown + command: nextflow run tests/modules/rmarkdownnotebook -entry test_rmarkdown -c tests/config/nextflow.config + tags: + - rmarkdownnotebook + files: + - path: output/rmarkdownnotebook/session_info.log + contains: + - R version 4.1.0 + - yaml_2.2.1 + - path: output/rmarkdownnotebook/test_rmd.html + contains: + - "n_iter = 10" + +- name: rmarkdownnotebook test_rmarkdown_parametrize + command: nextflow run tests/modules/rmarkdownnotebook -entry test_rmarkdown_parametrize -c tests/config/nextflow.config + tags: + - rmarkdownnotebook + files: + - path: output/rmarkdownnotebook/artifacts/artifact.txt + md5sum: b10a8db164e0754105b7a99be72e3fe5 + - path: output/rmarkdownnotebook/session_info.log + contains: + - R version 4.1.0 + - yaml_2.2.1 + - path: output/rmarkdownnotebook/test_rmd.html + contains: + - "n_iter = 12" From a740a6ff4890be1abb4d4cb3f67c0cd332bd11d7 Mon Sep 17 00:00:00 2001 
From: "James A. Fellows Yates" Date: Tue, 26 Oct 2021 22:54:10 +0200 Subject: [PATCH 164/314] New module: `checkm/lineagewf` (#899) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Added but test failing due to null on output channel * fix prefix variable * Complete checkm_lineagewf * Remove TODOs * Add description of fasta_ext parameter * Improve meta map for this context * Update meta.yml * Update modules/checkm/lineagewf/meta.yml Co-authored-by: James A. Fellows Yates * Update main.nf Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry Co-authored-by: Daniel Lundin --- modules/checkm/lineagewf/functions.nf | 78 +++++++++++++++++++++++++ modules/checkm/lineagewf/main.nf | 49 ++++++++++++++++ modules/checkm/lineagewf/meta.yml | 58 ++++++++++++++++++ tests/config/pytest_modules.yml | 6 +- tests/modules/checkm/lineagewf/main.nf | 24 ++++++++ tests/modules/checkm/lineagewf/test.yml | 35 +++++++++++ 6 files changed, 249 insertions(+), 1 deletion(-) create mode 100644 modules/checkm/lineagewf/functions.nf create mode 100644 modules/checkm/lineagewf/main.nf create mode 100644 modules/checkm/lineagewf/meta.yml create mode 100644 tests/modules/checkm/lineagewf/main.nf create mode 100644 tests/modules/checkm/lineagewf/test.yml diff --git a/modules/checkm/lineagewf/functions.nf b/modules/checkm/lineagewf/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/checkm/lineagewf/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish 
versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/checkm/lineagewf/main.nf b/modules/checkm/lineagewf/main.nf new file mode 100644 index 00000000..e655e5f5 --- /dev/null +++ b/modules/checkm/lineagewf/main.nf @@ -0,0 +1,49 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CHECKM_LINEAGEWF { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::checkm-genome=1.1.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/checkm-genome:1.1.3--py_1" + } else { + container "quay.io/biocontainers/checkm-genome:1.1.3--py_1" + } + + input: + tuple val(meta), path(fasta) + val fasta_ext + + output: + tuple val(meta), path("${prefix}") , emit: checkm_output + tuple val(meta), path("${prefix}.tsv"), emit: checkm_tsv + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + checkm \\ + lineage_wf \\ + -t $task.cpus \\ + -f ${prefix}.tsv \\ + --tab_table \\ + --pplacer_threads $task.cpus \\ + -x $fasta_ext \\ + $options.args \\ + . \\ + $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( checkm 2>&1 | grep '...:::' | sed 's/.*CheckM v//;s/ .*//' ) + END_VERSIONS + """ +} diff --git a/modules/checkm/lineagewf/meta.yml b/modules/checkm/lineagewf/meta.yml new file mode 100644 index 00000000..29c6096e --- /dev/null +++ b/modules/checkm/lineagewf/meta.yml @@ -0,0 +1,58 @@ +name: checkm_lineagewf +description: CheckM provides a set of tools for assessing the quality of genomes recovered from isolates, single cells, or metagenomes. +keywords: + - checkm + - mag + - metagenome + - quality + - isolates + - microbes + - single cells + - completeness + - contamination + - bins + - genome bins +tools: + - checkm: + description: Assess the quality of microbial genomes recovered from isolates, single cells, and metagenomes. 
+ homepage: https://ecogenomics.github.io/CheckM/ + documentation: https://github.com/Ecogenomics/CheckM/wiki + tool_dev_url: https://github.com/Ecogenomics/CheckM + doi: "10.1101/gr.186072.114" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: One or a list of multiple FASTA files of each bin, with extension defined with the fasta_ext value + pattern: "*.{$fasta_ext}" + - fasta_ext: + type: value + description: The file-type extension suffix of the input FASTA files (e.g., fasta, fna, fa, fas) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'sample', bin:'1' ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - checkm_output: + type: directory + description: CheckM output directory + pattern: "*/" + - checkm_tsv: + type: file + description: CheckM summary completeness statistics table + pattern: "*.tsv" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4e35fa24..343a0fc2 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -254,6 +254,10 @@ cat/fastq: - modules/cat/fastq/** - tests/modules/cat/fastq/** +checkm/lineagewf: + - modules/checkm/lineagewf/** + - tests/modules/checkm/lineagewf/** + chromap/chromap: - modules/chromap/chromap/** - tests/modules/chromap/chromap/** @@ -1106,7 +1110,7 @@ ucsc/bigwigaverageoverbed: ucsc/liftover: - modules/ucsc/liftover/** - tests/modules/ucsc/liftover/** - + ucsc/wigtobigwig: - modules/ucsc/wigtobigwig/** - tests/modules/ucsc/wigtobigwig/** diff --git a/tests/modules/checkm/lineagewf/main.nf b/tests/modules/checkm/lineagewf/main.nf new file mode 100644 index 00000000..94309896 --- /dev/null +++ b/tests/modules/checkm/lineagewf/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CHECKM_LINEAGEWF } from '../../../../modules/checkm/lineagewf/main.nf' addParams( options: [:] ) + +workflow test_checkm_lineagewf { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) ] + fasta_ext = 'fasta' + + CHECKM_LINEAGEWF ( input, fasta_ext ) +} + +workflow test_checkm_lineagewf_multi { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)] ] + fasta_ext = 'fasta' + + CHECKM_LINEAGEWF ( input, fasta_ext ) +} diff --git a/tests/modules/checkm/lineagewf/test.yml b/tests/modules/checkm/lineagewf/test.yml new file mode 100644 index 00000000..768601b0 --- /dev/null +++ b/tests/modules/checkm/lineagewf/test.yml @@ -0,0 +1,35 @@ +- name: checkm lineagewf + command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf -c tests/config/nextflow.config + tags: + - checkm + - checkm/lineagewf + files: + - path: output/checkm/test.tsv + md5sum: d5559764f563c4b55223e4e4a3dc1ec9 + - path: output/checkm/test/checkm.log + contains: + - "INFO: Parsing HMM hits to marker genes:" + - path: output/checkm/test/lineage.ms + contains: + - "# [Lineage Marker File]" + - "contigs" + - "UID1" + +- name: checkm lineagewf_multi + command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf_multi -c 
tests/config/nextflow.config + tags: + - checkm + - checkm/lineagewf + files: + - path: output/checkm/test.tsv + md5sum: 7e0fa177dcf151b84b7751813fbde3d1 + - path: output/checkm/test/checkm.log + contains: + - "INFO: Parsing HMM hits to marker genes:" + - path: output/checkm/test/lineage.ms + contains: + - "# [Lineage Marker File]" + - "contigs" + - "UID1" + - "genome" + From bd2baa1e7c0f0e3d052e10406d2f86125c51af20 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 26 Oct 2021 23:07:33 +0200 Subject: [PATCH 165/314] New module: `maxbin2` (#895) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Added, just need to finish tests once we have bacterial data * Add prelim test data * Fix version reporting * Add tests based on proposed test-dataset * Finalise new testdata * Fix md5sum issue by removing it... * Update main.nf * Apply suggestions from code review Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry --- modules/maxbin2/functions.nf | 78 ++++++++++++++++++++++++++++++++ modules/maxbin2/main.nf | 53 ++++++++++++++++++++++ modules/maxbin2/meta.yml | 79 +++++++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 15 +++++++ tests/modules/maxbin2/main.nf | 17 +++++++ tests/modules/maxbin2/test.yml | 15 +++++++ 7 files changed, 261 insertions(+) create mode 100644 modules/maxbin2/functions.nf create mode 100644 modules/maxbin2/main.nf create mode 100644 modules/maxbin2/meta.yml create mode 100644 tests/modules/maxbin2/main.nf create mode 100644 tests/modules/maxbin2/test.yml diff --git a/modules/maxbin2/functions.nf b/modules/maxbin2/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/maxbin2/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/maxbin2/main.nf b/modules/maxbin2/main.nf new file mode 100644 index 00000000..bcfa9590 --- /dev/null +++ b/modules/maxbin2/main.nf @@ -0,0 +1,53 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MAXBIN2 { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::maxbin2=2.2.7" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/maxbin2:2.2.7--he1b5a44_2" + } else { + container "quay.io/biocontainers/maxbin2:2.2.7--he1b5a44_2" + } + + input: + tuple val(meta), path(contigs), path(reads), path(abund) + + output: + tuple val(meta), path("*.fasta.gz") , emit: binned_fastas + tuple val(meta), path("*.summary") , emit: summary + tuple val(meta), path("*.log.gz") , emit: log + tuple val(meta), path("*.marker.gz") , emit: marker_counts + tuple val(meta), path("*.noclass.gz") , emit: unbinned_fasta + tuple val(meta), path("*.tooshort.gz"), emit: tooshort_fasta + tuple val(meta), path("*_bin.tar.gz") , emit: marker_bins , optional: true + tuple val(meta), path("*_gene.tar.gz"), emit: marker_genes, optional: true + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def associate_files = reads ? 
"-reads $reads" : "-abund $abund" + """ + run_MaxBin.pl \\ + -contig $contigs \\ + $associate_files \\ + -thread $task.cpus \\ + $options.args \\ + -out $prefix + + gzip *.fasta *.noclass *.tooshort *log *.marker + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + maxbin2: \$( run_MaxBin.pl -v | head -n 1 | sed 's/MaxBin //' ) + END_VERSIONS + """ +} diff --git a/modules/maxbin2/meta.yml b/modules/maxbin2/meta.yml new file mode 100644 index 00000000..358f8323 --- /dev/null +++ b/modules/maxbin2/meta.yml @@ -0,0 +1,79 @@ +name: maxbin2 +description: MaxBin is software capable of clustering metagenomic contigs +keywords: + - metagenomics + - assembly + - binning + - maxbin2 + - de novo assembly + - mags + - metagenome-assembled genomes + - contigs +tools: + - maxbin2: + description: MaxBin is software for binning assembled metagenomic sequences based on an Expectation-Maximization algorithm. + homepage: https://sourceforge.net/projects/maxbin/ + documentation: https://sourceforge.net/projects/maxbin/ + tool_dev_url: https://sourceforge.net/projects/maxbin/ + doi: "10.1093/bioinformatics/btv638" + licence: ['BSD 3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - contigs: + type: file + description: Multi FASTA file containing assembled contigs of a given sample + pattern: "*.fasta" + - reads: + type: file + description: Reads used to assemble contigs in FASTA or FASTQ format. Do not supply at the same time as abundance files. + pattern: "*.fasta" + - abund: + type: file + description: Contig abundance files, i.e. reads against each contig. See MaxBin2 README for details. Do not supply at the same time as read files. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - binned_fastas: + type: file + description: Binned contigs, one per bin designated with numeric IDs + pattern: "*.fasta.gz" + - summary: + type: file + description: Summary file describing which contigs are being classified into which bin + pattern: "*.summary" + - log: + type: file + description: Log file recording the core steps of MaxBin algorithm + pattern: "*.log.gz" + - marker: + type: file + description: Marker gene presence numbers for each bin + pattern: "*.marker.gz" + - unbinned_fasta: + type: file + description: All sequences that pass the minimum length threshold but are not classified successfully. + pattern: "*.noclass.gz" + - tooshort_fasta: + type: file + description: All sequences that do not meet the minimum length threshold. + pattern: "*.tooshort.gz" + - marker_genes: + type: file + description: Marker genes identified in each bin 
+ pattern: "*.marker_of_each_gene.tar.gz" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 343a0fc2..98a6204c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -686,6 +686,10 @@ mashtree: - modules/mashtree/** - tests/modules/mashtree/** +maxbin2: + - modules/maxbin2/** + - tests/modules/maxbin2/** + megahit: - modules/megahit/** - tests/modules/megahit/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index d7eda458..858e7737 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -239,5 +239,20 @@ params { hello = "${test_data_dir}/generic/txt/hello.txt" } } + 'bacteroides_fragilis'{ + 'genome' { + genome_fna_gz = "${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.fna.gz" + } + 'illumina' { + test1_contigs_fa_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" + test1_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_1.fastq.gz" + test1_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" + test2_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" + test2_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" + } + 'nanopore' { + test_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" + } + } } } diff --git a/tests/modules/maxbin2/main.nf b/tests/modules/maxbin2/main.nf new file mode 100644 index 00000000..bede2c6a --- /dev/null +++ b/tests/modules/maxbin2/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MAXBIN2 } from '../../../modules/maxbin2/main.nf' addParams( options: [:] ) + +workflow test_maxbin2 { + + input = [ + [ id:'test1', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + [] + ] + + MAXBIN2 ( input ) +} diff --git a/tests/modules/maxbin2/test.yml b/tests/modules/maxbin2/test.yml new file mode 100644 index 00000000..2721d17a --- /dev/null +++ b/tests/modules/maxbin2/test.yml @@ -0,0 +1,15 @@ +- name: maxbin2 + command: nextflow run ./tests/modules/maxbin2 -entry test_maxbin2 -c tests/config/nextflow.config + tags: + - maxbin2 + files: + - path: output/maxbin2/test1.001.fasta.gz + - path: output/maxbin2/test1.002.fasta.gz + - path: output/maxbin2/test1.log.gz + - path: output/maxbin2/test1.marker.gz + - path: output/maxbin2/test1.marker_of_each_bin.tar.gz + - path: output/maxbin2/test1.noclass.gz + - path: output/maxbin2/test1.summary + contains: + - "Bin name\tAbundance\tCompleteness\tGenome size\tGC content" + - path: output/maxbin2/test1.tooshort.gz From f47c27edfbc6181779699ff6b919d773d578aed7 Mon Sep 17 00:00:00 2001 From: Gisela Gabernet Date: Wed, 27 Oct 2021 00:09:04 +0200 Subject: [PATCH 166/314] Update checksum modules using human GTF (#900) * update test yml star align * update test yml stringtie merge * update star genomegenerate yml * update test yml rsem calculateexpression Co-authored-by: Harshil Patel --- .../modules/rsem/calculateexpression/test.yml | 2 +- tests/modules/star/align/test.yml | 1 + tests/modules/star/genomegenerate/test.yml | 2 +- tests/modules/stringtie/merge/test.yml | 52 ++++++++++++++----- 4 files changed, 42 insertions(+), 15 
deletions(-) diff --git a/tests/modules/rsem/calculateexpression/test.yml b/tests/modules/rsem/calculateexpression/test.yml index 9eb5effc..ac0866ea 100644 --- a/tests/modules/rsem/calculateexpression/test.yml +++ b/tests/modules/rsem/calculateexpression/test.yml @@ -1,8 +1,8 @@ - name: rsem calculateexpression test_rsem_calculateexpression command: nextflow run tests/modules/rsem/calculateexpression -entry test_rsem_calculateexpression -c tests/config/nextflow.config tags: - - rsem/calculateexpression - rsem + - rsem/calculateexpression files: - path: output/index/rsem/Genome md5sum: a654229fbca6071dcb6b01ce7df704da diff --git a/tests/modules/star/align/test.yml b/tests/modules/star/align/test.yml index 79ab38f1..47731c5c 100644 --- a/tests/modules/star/align/test.yml +++ b/tests/modules/star/align/test.yml @@ -81,6 +81,7 @@ md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: 38d08f0b944a2a1b981a250d675aa0d9 + - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out - path: output/star/test.SJ.out.tab diff --git a/tests/modules/star/genomegenerate/test.yml b/tests/modules/star/genomegenerate/test.yml index 1df59378..df8d5efc 100644 --- a/tests/modules/star/genomegenerate/test.yml +++ b/tests/modules/star/genomegenerate/test.yml @@ -1,8 +1,8 @@ - name: star genomegenerate test_star_genomegenerate command: nextflow run tests/modules/star/genomegenerate -entry test_star_genomegenerate -c tests/config/nextflow.config tags: - - star/genomegenerate - star + - star/genomegenerate files: - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da diff --git a/tests/modules/stringtie/merge/test.yml b/tests/modules/stringtie/merge/test.yml index ea47ad48..e6436612 100644 --- a/tests/modules/stringtie/merge/test.yml +++ b/tests/modules/stringtie/merge/test.yml @@ -1,23 +1,49 @@ - name: stringtie merge forward-strand - command: nextflow run ./tests/modules/stringtie/merge/ -entry test_stringtie_forward_merge -c tests/config/nextflow.config + command: nextflow run tests/modules/stringtie/merge -entry test_stringtie_forward_merge -c tests/config/nextflow.config tags: - stringtie - stringtie/merge files: - - path: ./output/stringtie/stringtie.merged.gtf - contains: - - 'stringtie' - - 'merge' - - 'chr22' + - path: output/stringtie/stringtie.merged.gtf + md5sum: 9fab7049ef2eafdea246fc787d1def40 + - path: output/stringtie/test.ballgown/e2t.ctab + md5sum: 9ae42e056c955a88a883e5e917840d77 + - path: output/stringtie/test.ballgown/e_data.ctab + md5sum: adbedee7b2f84c70362ad6dfa57442b7 + - path: output/stringtie/test.ballgown/i2t.ctab + md5sum: 658131af118cfb416939044fdb5411de + - path: output/stringtie/test.ballgown/i_data.ctab + md5sum: f01d94a7d0dcfad3bfab18ed50dad16c + - path: output/stringtie/test.ballgown/t_data.ctab + md5sum: 92a98902784e7406ffe054d2adbabc7c + - path: output/stringtie/test.coverage.gtf + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/stringtie/test.gene.abundance.txt + md5sum: 9708811bcefe0f6384293d6f419f3250 + - path: output/stringtie/test.transcripts.gtf + md5sum: 0e42709bfe30c2c7f2574ba664f5fa9f -- name: stringtie merge reverse-strand - command: nextflow run ./tests/modules/stringtie/merge/ -entry test_stringtie_reverse_merge -c tests/config/nextflow.config +- name: stringtie merge test_stringtie_reverse_merge + command: nextflow run tests/modules/stringtie/merge -entry test_stringtie_reverse_merge -c tests/config/nextflow.config tags: - stringtie - stringtie/merge 
files: - - path: ./output/stringtie/stringtie.merged.gtf - contains: - - 'stringtie' - - 'merge' - - 'chr22' + - path: output/stringtie/stringtie.merged.gtf + md5sum: afc461bb3cbc368f268a7a45c1b54497 + - path: output/stringtie/test.ballgown/e2t.ctab + md5sum: 9ae42e056c955a88a883e5e917840d77 + - path: output/stringtie/test.ballgown/e_data.ctab + md5sum: fd8496d3957ade3b2c0853155f9a67da + - path: output/stringtie/test.ballgown/i2t.ctab + md5sum: 658131af118cfb416939044fdb5411de + - path: output/stringtie/test.ballgown/i_data.ctab + md5sum: f01d94a7d0dcfad3bfab18ed50dad16c + - path: output/stringtie/test.ballgown/t_data.ctab + md5sum: 92a98902784e7406ffe054d2adbabc7c + - path: output/stringtie/test.coverage.gtf + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/stringtie/test.gene.abundance.txt + md5sum: 94b85145d60ab1b80a7f0f6cf08418b0 + - path: output/stringtie/test.transcripts.gtf + md5sum: 3196e3d50fd461aae6408e0a70acae68 From b552958341a1c69382c7534f1a81c9ca011ddd6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Wed, 27 Oct 2021 15:48:49 +0200 Subject: [PATCH 167/314] add paraclu (#909) * add paraclu * remove TODOs * add min_cluster as input parameter, remove option parameters * add tool_dev_url --- modules/paraclu/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/paraclu/main.nf | 45 +++++++++++++++++++ modules/paraclu/meta.yml | 45 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/paraclu/main.nf | 15 +++++++ tests/modules/paraclu/test.yml | 7 +++ 6 files changed, 194 insertions(+) create mode 100644 modules/paraclu/functions.nf create mode 100644 modules/paraclu/main.nf create mode 100644 modules/paraclu/meta.yml create mode 100644 tests/modules/paraclu/main.nf create mode 100644 tests/modules/paraclu/test.yml diff --git a/modules/paraclu/functions.nf b/modules/paraclu/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/paraclu/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/paraclu/main.nf b/modules/paraclu/main.nf new file mode 100644 index 00000000..6d65a784 --- /dev/null +++ b/modules/paraclu/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PARACLU { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::paraclu=10" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/paraclu%3A10--h9a82719_1" + } else { + container "quay.io/biocontainers/paraclu:10--h9a82719_1" + } + + input: + tuple val(meta), path(bed) + val(min_cluster) + + output: + tuple val(meta), path("*.bed"), emit: bed + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def VERSION=10 + """ + + awk -F "\t" '{print\$1"\t"\$6"\t"\$2"\t"\$5}' < $bed > ${bed}_4P + sort -k1,1 -k3n ${bed}_4P > ${bed}_4Ps + paraclu $min_cluster ${bed}_4Ps > ${prefix}.clustered + paraclu-cut ${prefix}.clustered > ${prefix}.clustered.simplified + awk -F '\t' '{print \$1"\t"\$3"\t"\$4"\t"\$1":"\$3".."\$4","\$2"\t"\$6"\t"\$2}' ${prefix}.clustered.simplified > ${prefix}.clustered.simplified.bed + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: $VERSION + END_VERSIONS + """ +} diff --git a/modules/paraclu/meta.yml b/modules/paraclu/meta.yml new file mode 100644 index 00000000..a3424c57 --- /dev/null +++ b/modules/paraclu/meta.yml @@ -0,0 +1,45 @@ +name: paraclu +description: Paraclu finds clusters in data attached to sequences. +keywords: + - sort +tools: + - paraclu: + description: Paraclu finds clusters in data attached to sequences. + homepage: https://gitlab.com/mcfrith/paraclu + documentation: https://gitlab.com/mcfrith/paraclu + tool_dev_url: https://gitlab.com/mcfrith/paraclu + doi: "" + licence: ['GPL v3-or-later'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bed: + type: file + description: BED file + pattern: "*.bed" + - min_cluster: + type: integer + description: Minimum size of cluster + pattern: "*.bed" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bed: + type: file + description: clustered BED file + pattern: "*.bed" + +authors: + - "@mashehu" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 98a6204c..e77fe125 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -791,6 +791,10 @@ pangolin: - modules/pangolin/** - tests/modules/pangolin/** +paraclu: + - modules/paraclu/** + - tests/modules/paraclu/** + pbbam/pbmerge: - modules/pbbam/pbmerge/** - tests/modules/pbbam/pbmerge/** diff --git a/tests/modules/paraclu/main.nf b/tests/modules/paraclu/main.nf new file mode 100644 index 00000000..f5101591 --- /dev/null +++ b/tests/modules/paraclu/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PARACLU } from '../../../modules/paraclu/main.nf' addParams( options: [:] ) + +workflow test_paraclu { + + input = [[ id:'test' ], // meta map + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] + min_cluster = 30 + + PARACLU ( input, min_cluster ) +} diff --git a/tests/modules/paraclu/test.yml b/tests/modules/paraclu/test.yml new file mode 100644 index 00000000..3aa3e8b4 --- /dev/null +++ b/tests/modules/paraclu/test.yml @@ -0,0 +1,7 @@ +- name: paraclu test_paraclu + command: nextflow run tests/modules/paraclu -entry test_paraclu -c tests/config/nextflow.config + tags: + - paraclu + files: + - path: output/paraclu/test.clustered.simplified.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e From 754db250a09d9bfac84e1c5c168420160080a701 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Wed, 27 Oct 2021 16:14:52 +0200 Subject: [PATCH 168/314] Add Manta/somatic module + (fix tiny strelka params problem, i know bad practice :( ) (#912) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * remove params statement * add manta/somatic module * fix strelka target bed thing * removing checksums should make this pass * Update modules/manta/somatic/main.nf Co-authored-by: Matthias Hörtenhuber * fix indentation Co-authored-by: Matthias Hörtenhuber --- modules/manta/somatic/functions.nf | 78 ++++++++++++++++++ modules/manta/somatic/main.nf | 67 +++++++++++++++ modules/manta/somatic/meta.yml | 103 ++++++++++++++++++++++++ modules/strelka/germline/main.nf | 8 +- modules/strelka/somatic/main.nf | 2 +- tests/config/pytest_modules.yml | 4 + tests/modules/manta/somatic/main.nf | 23 ++++++ tests/modules/manta/somatic/test.yml | 18 +++++ tests/modules/strelka/germline/main.nf | 34 ++++---- tests/modules/strelka/germline/test.yml | 17 ++-- 10 files changed, 326 insertions(+), 28 deletions(-) create mode 100644 modules/manta/somatic/functions.nf create mode 100644 modules/manta/somatic/main.nf create mode 100644 modules/manta/somatic/meta.yml create mode 100644 tests/modules/manta/somatic/main.nf create mode 100644 tests/modules/manta/somatic/test.yml diff --git a/modules/manta/somatic/functions.nf b/modules/manta/somatic/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/manta/somatic/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used 
in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/manta/somatic/main.nf b/modules/manta/somatic/main.nf new file mode 100644 index 00000000..16a30f17 --- /dev/null +++ b/modules/manta/somatic/main.nf @@ -0,0 +1,67 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MANTA_SOMATIC { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::manta=1.6.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" + } else { + container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" + } + + input: + tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor) + path fasta + path fai + path target_bed + path target_bed_tbi + + output: + tuple val(meta), path("*.candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf + tuple val(meta), path("*.candidate_small_indels.vcf.gz.tbi") , emit: candidate_small_indels_vcf_tbi + tuple val(meta), path("*.candidate_sv.vcf.gz") , emit: candidate_sv_vcf + tuple val(meta), path("*.candidate_sv.vcf.gz.tbi") , emit: candidate_sv_vcf_tbi + tuple val(meta), path("*.diploid_sv.vcf.gz") , emit: diploid_sv_vcf + tuple val(meta), path("*.diploid_sv.vcf.gz.tbi") , emit: diploid_sv_vcf_tbi + tuple val(meta), path("*.somatic_sv.vcf.gz") , emit: somatic_sv_vcf + tuple val(meta), path("*.somatic_sv.vcf.gz.tbi") , emit: somatic_sv_vcf_tbi + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" + + """ + configManta.py \ + --tumorBam $cram_tumor \ + --normalBam $cram_normal \ + --reference $fasta \ + $options_manta \ + --runDir manta + + python manta/runWorkflow.py -m local -j $task.cpus + + mv manta/results/variants/candidateSmallIndels.vcf.gz ${prefix}.candidate_small_indels.vcf.gz + mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi ${prefix}.candidate_small_indels.vcf.gz.tbi + mv manta/results/variants/candidateSV.vcf.gz ${prefix}.candidate_sv.vcf.gz + mv manta/results/variants/candidateSV.vcf.gz.tbi ${prefix}.candidate_sv.vcf.gz.tbi + mv manta/results/variants/diploidSV.vcf.gz ${prefix}.diploid_sv.vcf.gz + mv manta/results/variants/diploidSV.vcf.gz.tbi ${prefix}.diploid_sv.vcf.gz.tbi + mv manta/results/variants/somaticSV.vcf.gz ${prefix}.somatic_sv.vcf.gz + mv manta/results/variants/somaticSV.vcf.gz.tbi ${prefix}.somatic_sv.vcf.gz.tbi + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( configManta.py --version ) + END_VERSIONS + """ +} diff --git a/modules/manta/somatic/meta.yml b/modules/manta/somatic/meta.yml new file mode 100644 index 00000000..08103ba7 --- /dev/null +++ b/modules/manta/somatic/meta.yml @@ -0,0 +1,103 @@ +name: manta_somatic +description: Manta calls structural variants (SVs) and indels from mapped paired-end sequencing reads. It is optimized for analysis of germline variation in small sets of individuals and somatic variation in tumor/normal sample pairs. +keywords: + - somatic + - wgs + - wxs + - panel + - vcf + - structural variants + - small indels +tools: + - manta: + description: Structural variant and indel caller for mapped sequencing data + homepage: https://github.com/Illumina/manta + documentation: https://github.com/Illumina/manta/blob/v1.6.0/docs/userGuide/README.md + tool_dev_url: https://github.com/Illumina/manta + doi: "10.1093/bioinformatics/btv710" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - cram_normal: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - crai_normal: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" + - cram_tumor: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - crai_tumor: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" + - fasta: + type: file + description: Genome reference FASTA file + pattern: "*.{fa,fasta}" + - fai: + type: file + description: Genome reference FASTA index file + pattern: "*.{fa.fai,fasta.fai}" + - target_bed: + type: file + description: BED file containing target regions for variant calling + pattern: "*.{bed}" + - target_bed_tbi: + type: file + description: Index for BED file containing target regions for variant calling + pattern: "*.{bed.tbi}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - candidate_small_indels_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - candidate_small_indels_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - candidate_sv_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - candidate_sv_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - diploid_sv_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - diploid_sv_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - somatic_sv_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - somatic_sv_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@FriederikeHanssen" diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index 64a01e6c..4918bff7 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -23,17 +23,19 @@ process STRELKA_GERMLINE { path fasta path fai path target_bed + path target_bed_tbi + output: tuple val(meta), path("*variants.vcf.gz") , emit: vcf tuple val(meta), path("*variants.vcf.gz.tbi"), emit: vcf_tbi tuple val(meta), path("*genome.vcf.gz") , emit: genome_vcf tuple val(meta), path("*genome.vcf.gz.tbi") , emit: genome_vcf_tbi - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def regions = params.target_bed ? "--exome --callRegions ${target_bed}" : "" + def regions = target_bed ? 
"--exome --callRegions ${target_bed}" : "" """ configureStrelkaGermlineWorkflow.py \\ --bam $bam \\ @@ -50,7 +52,7 @@ process STRELKA_GERMLINE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configureStrelkaGermlineWorkflow.py --version ) + ${getSoftwareName(task.process)}: \$( configureStrelkaSomaticWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf index 35e7053f..4bedbca0 100644 --- a/modules/strelka/somatic/main.nf +++ b/modules/strelka/somatic/main.nf @@ -34,7 +34,7 @@ process STRELKA_SOMATIC { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def options_strelka = params.target_bed ? "--exome --callRegions ${target_bed}" : "" + def options_strelka = target_bed ? "--exome --callRegions ${target_bed}" : "" """ configureStrelkaSomaticWorkflow.py \\ --tumor $cram_tumor \\ diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index e77fe125..96e4a3c9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -678,6 +678,10 @@ maltextract: - modules/maltextract/** - tests/modules/maltextract/** +manta/somatic: + - modules/manta/somatic/** + - tests/modules/manta/somatic/** + mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** diff --git a/tests/modules/manta/somatic/main.nf b/tests/modules/manta/somatic/main.nf new file mode 100644 index 00000000..553735c9 --- /dev/null +++ b/tests/modules/manta/somatic/main.nf @@ -0,0 +1,23 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MANTA_SOMATIC } from '../../../../modules/manta/somatic/main.nf' addParams( options: [:] ) + +workflow test_manta_somatic { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + + MANTA_SOMATIC ( input, fasta, fai, bed, bed_tbi ) +} diff --git a/tests/modules/manta/somatic/test.yml b/tests/modules/manta/somatic/test.yml new file mode 100644 index 00000000..72f0953d --- /dev/null +++ b/tests/modules/manta/somatic/test.yml @@ -0,0 +1,18 @@ +- name: manta somatic test_manta_somatic + command: nextflow run tests/modules/manta/somatic -entry test_manta_somatic -c tests/config/nextflow.config + tags: + - manta/somatic + - manta + files: + - path: output/manta/test.candidate_small_indels.vcf.gz + - path: output/manta/test.candidate_small_indels.vcf.gz.tbi + md5sum: 4cb176febbc8c26d717a6c6e67b9c905 + - path: output/manta/test.candidate_sv.vcf.gz + - path: output/manta/test.candidate_sv.vcf.gz.tbi + md5sum: 4cb176febbc8c26d717a6c6e67b9c905 + - path: output/manta/test.diploid_sv.vcf.gz + - path: 
output/manta/test.diploid_sv.vcf.gz.tbi + md5sum: 4cb176febbc8c26d717a6c6e67b9c905 + - path: output/manta/test.somatic_sv.vcf.gz + - path: output/manta/test.somatic_sv.vcf.gz.tbi + md5sum: 4cb176febbc8c26d717a6c6e67b9c905 diff --git a/tests/modules/strelka/germline/main.nf b/tests/modules/strelka/germline/main.nf index 4ce4699a..0d5193bb 100644 --- a/tests/modules/strelka/germline/main.nf +++ b/tests/modules/strelka/germline/main.nf @@ -5,30 +5,32 @@ nextflow.enable.dsl = 2 include { STRELKA_GERMLINE } from '../../../../modules/strelka/germline/main.nf' addParams( options: [:] ) workflow test_strelka_germline { - input = [ + input = [ [ id:'test'], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), ] - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) - targets = [] - - STRELKA_GERMLINE ( input, fasta, fai, targets ) + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + target_bed = [] + target_bed_tbi = [] + + STRELKA_GERMLINE ( input, fasta, fai, target_bed, target_bed_tbi ) } workflow test_strelka_germline_target_bed { - input = [ + input = [ [ id:'test'], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), ] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) - targets = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + target_bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) - STRELKA_GERMLINE ( input, fasta, fai, targets ) + STRELKA_GERMLINE ( input, fasta, fai, target_bed, target_bed_tbi ) } diff --git a/tests/modules/strelka/germline/test.yml b/tests/modules/strelka/germline/test.yml index ac654ce8..a3ab3ef6 100644 --- a/tests/modules/strelka/germline/test.yml +++ b/tests/modules/strelka/germline/test.yml @@ -1,20 +1,21 @@ -- name: strelka germline - command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline -c tests/config/nextflow.config +- name: strelka germline test_strelka_germline + command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline -c 
tests/config/nextflow.config tags: - strelka - strelka/germline files: - - path: output/strelka/test.variants.vcf.gz - - path: output/strelka/test.variants.vcf.gz.tbi - path: output/strelka/test.genome.vcf.gz - path: output/strelka/test.genome.vcf.gz.tbi -- name: strelka germline target bed - command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c tests/config/nextflow.config + - path: output/strelka/test.variants.vcf.gz + - path: output/strelka/test.variants.vcf.gz.tbi + +- name: strelka germline test_strelka_germline_target_bed + command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c tests/config/nextflow.config tags: - strelka - strelka/germline files: - - path: output/strelka/test.variants.vcf.gz - - path: output/strelka/test.variants.vcf.gz.tbi - path: output/strelka/test.genome.vcf.gz - path: output/strelka/test.genome.vcf.gz.tbi + - path: output/strelka/test.variants.vcf.gz + - path: output/strelka/test.variants.vcf.gz.tbi From 12a3f80f3345f1b1215c7c396804c05c953b296b Mon Sep 17 00:00:00 2001 From: Daniel Straub <42973691+d4straub@users.noreply.github.com> Date: Wed, 27 Oct 2021 17:00:11 +0200 Subject: [PATCH 169/314] add filtlong (#919) --- modules/filtlong/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/filtlong/main.nf | 43 ++++++++++++++++++ modules/filtlong/meta.yml | 50 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/filtlong/main.nf | 36 +++++++++++++++ tests/modules/filtlong/test.yml | 23 ++++++++++ 6 files changed, 234 insertions(+) create mode 100644 modules/filtlong/functions.nf create mode 100644 modules/filtlong/main.nf create mode 100644 modules/filtlong/meta.yml create mode 100644 tests/modules/filtlong/main.nf create mode 100644 tests/modules/filtlong/test.yml diff --git a/modules/filtlong/functions.nf b/modules/filtlong/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/filtlong/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') 
&& !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/filtlong/main.nf b/modules/filtlong/main.nf new file mode 100644 index 00000000..6e82f112 --- /dev/null +++ b/modules/filtlong/main.nf @@ -0,0 +1,43 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FILTLONG { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::filtlong=0.2.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/filtlong:0.2.1--h9a82719_0" + } else { + container "quay.io/biocontainers/filtlong:0.2.1--h9a82719_0" + } + + input: + tuple val(meta), path(shortreads), path(longreads) + + output: + tuple val(meta), path("${meta.id}_lr_filtlong.fastq.gz"), emit: reads + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def short_reads = meta.single_end ? "-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" + """ + filtlong \\ + $short_reads \\ + $options.args \\ + $longreads \\ + | gzip -n > ${prefix}_lr_filtlong.fastq.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( filtlong --version | sed -e "s/Filtlong v//g" ) + END_VERSIONS + """ +} diff --git a/modules/filtlong/meta.yml b/modules/filtlong/meta.yml new file mode 100644 index 00000000..7616a176 --- /dev/null +++ b/modules/filtlong/meta.yml @@ -0,0 +1,50 @@ +name: filtlong +description: Filtlong filters long reads based on quality measures or short read data. +keywords: + - nanopore + - quality control + - QC + - filtering + - long reads + - short reads +tools: + - filtlong: + description: Filtlong is a tool for filtering long reads. It can take a set of long reads and produce a smaller, better subset. It uses both read length (longer is better) and read identity (higher is better) when choosing which reads pass the filter. + homepage: https://anaconda.org/bioconda/filtlong + documentation: None + tool_dev_url: https://github.com/rrwick/Filtlong + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - shortreads: + type: file + description: fastq file + pattern: "*.{fq,fastq,fq.gz,fastq.gz}" + - longreads: + type: file + description: fastq file + pattern: "*.{fq,fastq,fq.gz,fastq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - reads: + type: file + description: Filtered (compressed) fastq file + pattern: "*.fastq.gz" + +authors: + - "@d4straub" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 96e4a3c9..813d8546 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -382,6 +382,10 @@ fgbio/sortbam: - modules/fgbio/sortbam/** - tests/modules/fgbio/sortbam/** +filtlong: + - modules/filtlong/** + - tests/modules/filtlong/** + flash: - modules/flash/** - tests/modules/flash/** diff --git a/tests/modules/filtlong/main.nf b/tests/modules/filtlong/main.nf new file mode 100644 index 00000000..cd037623 --- /dev/null +++ b/tests/modules/filtlong/main.nf @@ -0,0 +1,36 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FILTLONG } from '../../../modules/filtlong/main.nf' addParams( options: [:] ) + +workflow test_filtlong { + + input = [ [ id:'test', single_end:false ], // meta map + [], + [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + ] + + FILTLONG ( input ) +} + +workflow test_filtlong_illumina_se { + + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ], + [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + ] + + FILTLONG ( input ) +} + +workflow test_filtlong_illumina_pe { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + ] + + FILTLONG ( input ) +} diff --git a/tests/modules/filtlong/test.yml b/tests/modules/filtlong/test.yml new file mode 100644 index 00000000..30779d45 --- /dev/null +++ b/tests/modules/filtlong/test.yml @@ -0,0 +1,23 @@ +- name: filtlong test_filtlong + command: nextflow run tests/modules/filtlong -entry test_filtlong -c tests/config/nextflow.config + tags: + - filtlong + files: + - path: output/filtlong/test_lr_filtlong.fastq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + +- name: filtlong test_filtlong_illumina_se + command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_se -c tests/config/nextflow.config + tags: + - filtlong + files: + - path: output/filtlong/test_lr_filtlong.fastq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + +- name: filtlong test_filtlong_illumina_pe + command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_pe -c tests/config/nextflow.config + tags: + - filtlong + files: + - path: output/filtlong/test_lr_filtlong.fastq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a From 257078bb39a3c16b287f80362516e779a4ef6131 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Wed, 27 Oct 2021 17:49:54 +0200 Subject: [PATCH 170/314] Update strelka, by enabling BP if Manta candidates are present (#923) * add BP to strelka/somatic * merge conflicts * update strelka 
modules for BP * update strelka modules for BP * apply suggestions from code review --- modules/strelka/germline/main.nf | 2 +- modules/strelka/somatic/main.nf | 8 +++++--- modules/strelka/somatic/meta.yml | 8 ++++++++ tests/modules/strelka/somatic/main.nf | 23 ++++++++++++++++++++++- tests/modules/strelka/somatic/test.yml | 13 +++++++++++++ 5 files changed, 49 insertions(+), 5 deletions(-) diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index 4918bff7..0d201940 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -52,7 +52,7 @@ process STRELKA_GERMLINE { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configureStrelkaSomaticWorkflow.py --version ) + ${getSoftwareName(task.process)}: \$( configureStrelkaGermlineWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf index 4bedbca0..02bd5822 100644 --- a/modules/strelka/somatic/main.nf +++ b/modules/strelka/somatic/main.nf @@ -19,7 +19,7 @@ process STRELKA_SOMATIC { } input: - tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor) + tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor), path(manta_candidate_small_indels), path(manta_candidate_small_indels_tbi) path fasta path fai path target_bed @@ -34,13 +34,15 @@ process STRELKA_SOMATIC { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def options_strelka = target_bed ? "--exome --callRegions ${target_bed}" : "" + def options_target_bed = target_bed ? "--exome --callRegions ${target_bed}" : "" + def options_manta = manta_candidate_small_indels ? "--indelCandidates ${manta_candidate_small_indels}" : "" """ configureStrelkaSomaticWorkflow.py \\ --tumor $cram_tumor \\ --normal $cram_normal \\ --referenceFasta $fasta \\ - $options_strelka \\ + $options_target_bed \\ + $options_manta \\ $options.args \\ --runDir strelka diff --git a/modules/strelka/somatic/meta.yml b/modules/strelka/somatic/meta.yml index d9bd993a..ce5acb33 100644 --- a/modules/strelka/somatic/meta.yml +++ b/modules/strelka/somatic/meta.yml @@ -37,6 +37,14 @@ input: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" + - manta_candidate_small_indels: + type: file + description: VCF.gz file + pattern: "*.{vcf.gz}" + - manta_candidate_small_indels_tbi: + type: file + description: VCF.gz index file + pattern: "*.tbi" - fasta: type: file description: Genome reference FASTA file diff --git a/tests/modules/strelka/somatic/main.nf b/tests/modules/strelka/somatic/main.nf index 8dec808e..60127f58 100644 --- a/tests/modules/strelka/somatic/main.nf +++ b/tests/modules/strelka/somatic/main.nf @@ -11,7 +11,28 @@ workflow test_strelka_somatic { file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + [],[] + ] + + fasta = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + + STRELKA_SOMATIC (input, fasta, fai, bed, bed_tbi ) +} + +workflow test_strelka__best_practices_somatic { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/strelka/somatic/test.yml b/tests/modules/strelka/somatic/test.yml index f98b7232..b461d335 100644 --- a/tests/modules/strelka/somatic/test.yml +++ b/tests/modules/strelka/somatic/test.yml @@ -10,3 +10,16 @@ - path: output/strelka/test.somatic_snvs.vcf.gz - path: output/strelka/test.somatic_snvs.vcf.gz.tbi md5sum: 4cb176febbc8c26d717a6c6e67b9c905 + +- name: strelka somatic test_strelka__best_practices_somatic + command: nextflow run tests/modules/strelka/somatic -entry test_strelka__best_practices_somatic -c tests/config/nextflow.config + tags: + - strelka + - strelka/somatic + files: + - path: output/strelka/test.somatic_indels.vcf.gz + - path: output/strelka/test.somatic_indels.vcf.gz.tbi + md5sum: 4cb176febbc8c26d717a6c6e67b9c905 + - path: output/strelka/test.somatic_snvs.vcf.gz + - path: output/strelka/test.somatic_snvs.vcf.gz.tbi + md5sum: 4cb176febbc8c26d717a6c6e67b9c905 From 80d8e87fa4193c88c2f7de380d815422ebfbec83 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 27 Oct 2021 18:24:48 +0200 Subject: [PATCH 171/314] adding new manta/germline + manta/tumoronly modules (#906) * feat: all manta/single to modules * fix module name * fix module name * fix: changes from review comments * fix: test data --- modules/manta/germline/functions.nf | 78 +++++++++++++++++++++++ modules/manta/germline/main.nf | 68 ++++++++++++++++++++ modules/manta/germline/meta.yml | 87 +++++++++++++++++++++++++ modules/manta/tumoronly/functions.nf | 78 +++++++++++++++++++++++ modules/manta/tumoronly/main.nf | 68 ++++++++++++++++++++ modules/manta/tumoronly/meta.yml | 88 ++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 8 +++ tests/modules/manta/germline/main.nf | 35 ++++++++++ tests/modules/manta/germline/test.yml | 24 +++++++ tests/modules/manta/tumoronly/main.nf | 35 ++++++++++ tests/modules/manta/tumoronly/test.yml | 24 +++++++ 11 files changed, 593 insertions(+) create mode 100644 modules/manta/germline/functions.nf create mode 100644 modules/manta/germline/main.nf create mode 100644 modules/manta/germline/meta.yml create mode 100644 modules/manta/tumoronly/functions.nf create mode 100644 modules/manta/tumoronly/main.nf create mode 100644 modules/manta/tumoronly/meta.yml create mode 100644 tests/modules/manta/germline/main.nf create mode 100644 tests/modules/manta/germline/test.yml create mode 100644 tests/modules/manta/tumoronly/main.nf create mode 100644 tests/modules/manta/tumoronly/test.yml diff --git a/modules/manta/germline/functions.nf b/modules/manta/germline/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/manta/germline/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf new file mode 100644 index 00000000..ca2ac9dc --- /dev/null +++ b/modules/manta/germline/main.nf @@ -0,0 +1,68 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MANTA_GERMLINE { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" + } else { + container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" + } + + input: + tuple val(meta), path(cram), path(crai) + path fasta + path fai + path target_bed + path target_bed_tbi + + output: + tuple val(meta), path("*candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf + tuple val(meta), path("*candidate_small_indels.vcf.gz.tbi"), emit: candidate_small_indels_vcf_tbi + tuple val(meta), path("*candidate_sv.vcf.gz") , emit: candidate_sv_vcf + tuple val(meta), path("*candidate_sv.vcf.gz.tbi") , emit: candidate_sv_vcf_tbi + tuple val(meta), path("*diploid_sv.vcf.gz") , emit: diploid_sv_vcf + tuple val(meta), path("*diploid_sv.vcf.gz.tbi") , emit: diploid_sv_vcf_tbi + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def options_manta = target_bed ? 
"--exome --callRegions $target_bed" : "" + """ + configManta.py \ + --bam $cram \ + --reference $fasta \ + $options_manta \ + --runDir manta + + python manta/runWorkflow.py -m local -j $task.cpus + + mv manta/results/variants/candidateSmallIndels.vcf.gz \ + ${prefix}.candidate_small_indels.vcf.gz + mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \ + ${prefix}.candidate_small_indels.vcf.gz.tbi + mv manta/results/variants/candidateSV.vcf.gz \ + ${prefix}.candidate_sv.vcf.gz + mv manta/results/variants/candidateSV.vcf.gz.tbi \ + ${prefix}.candidate_sv.vcf.gz.tbi + mv manta/results/variants/diploidSV.vcf.gz \ + ${prefix}.diploid_sv.vcf.gz + mv manta/results/variants/diploidSV.vcf.gz.tbi \ + ${prefix}.diploid_sv.vcf.gz.tbi + + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( configManta.py --version ) + END_VERSIONS + """ +} diff --git a/modules/manta/germline/meta.yml b/modules/manta/germline/meta.yml new file mode 100644 index 00000000..7933fd6c --- /dev/null +++ b/modules/manta/germline/meta.yml @@ -0,0 +1,87 @@ +name: manta_germline +description: Manta calls structural variants (SVs) and indels from mapped paired-end sequencing reads. It is optimized for analysis of germline variation in small sets of individuals and somatic variation in tumor/normal sample pairs. +keywords: + - somatic + - wgs + - wxs + - panel + - vcf + - structural variants + - small indels +tools: + - manta: + description: Structural variant and indel caller for mapped sequencing data + homepage: https://github.com/Illumina/manta + documentation: https://github.com/Illumina/manta/blob/v1.6.0/docs/userGuide/README.md + tool_dev_url: https://github.com/Illumina/manta + doi: "10.1093/bioinformatics/btv710" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cram: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - crai: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" + - fasta: + type: file + description: Genome reference FASTA file + pattern: "*.{fa,fasta}" + - fai: + type: file + description: Genome reference FASTA index file + pattern: "*.{fa.fai,fasta.fai}" + - target_bed: + type: file + description: BED file containing target regions for variant calling + pattern: "*.{bed}" + - target_bed_tbi: + type: file + description: Index for BED file containing target regions for variant calling + pattern: "*.{bed.tbi}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - candidate_small_indels_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - candidate_small_indels_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - candidate_sv_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - candidate_sv_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - diploid_sv_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - diploid_sv_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@maxulysse" diff --git a/modules/manta/tumoronly/functions.nf b/modules/manta/tumoronly/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/manta/tumoronly/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/manta/tumoronly/main.nf b/modules/manta/tumoronly/main.nf new file mode 100644 index 00000000..a86279df --- /dev/null +++ b/modules/manta/tumoronly/main.nf @@ -0,0 +1,68 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MANTA_TUMORONLY { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" + } else { + container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" + } + + input: + tuple val(meta), path(cram), path(crai) + path fasta + path fai + path target_bed + path target_bed_tbi + + output: + tuple val(meta), path("*candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf + tuple val(meta), path("*candidate_small_indels.vcf.gz.tbi"), emit: candidate_small_indels_vcf_tbi + tuple val(meta), path("*candidate_sv.vcf.gz") , emit: candidate_sv_vcf + tuple val(meta), path("*candidate_sv.vcf.gz.tbi") , emit: candidate_sv_vcf_tbi + tuple val(meta), path("*tumor_sv.vcf.gz") , emit: tumor_sv_vcf + tuple val(meta), path("*tumor_sv.vcf.gz.tbi") , emit: tumor_sv_vcf_tbi + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" + """ + configManta.py \ + --tumorBam $cram \ + --reference $fasta \ + $options_manta \ + --runDir manta + + python manta/runWorkflow.py -m local -j $task.cpus + + mv manta/results/variants/candidateSmallIndels.vcf.gz \ + ${prefix}.candidate_small_indels.vcf.gz + mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \ + ${prefix}.candidate_small_indels.vcf.gz.tbi + mv manta/results/variants/candidateSV.vcf.gz \ + ${prefix}.candidate_sv.vcf.gz + mv manta/results/variants/candidateSV.vcf.gz.tbi \ + ${prefix}.candidate_sv.vcf.gz.tbi + mv manta/results/variants/tumorSV.vcf.gz \ + ${prefix}.tumor_sv.vcf.gz + mv manta/results/variants/tumorSV.vcf.gz.tbi \ + ${prefix}.tumor_sv.vcf.gz.tbi + + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( configManta.py --version ) + END_VERSIONS + """ +} diff --git a/modules/manta/tumoronly/meta.yml b/modules/manta/tumoronly/meta.yml new file mode 100644 index 00000000..d4af9402 --- /dev/null +++ b/modules/manta/tumoronly/meta.yml @@ -0,0 +1,88 @@ +name: manta_tumoronly +description: Manta calls structural variants (SVs) and indels from mapped paired-end sequencing reads. It is optimized for analysis of germline variation in small sets of individuals and somatic variation in tumor/normal sample pairs. 
+keywords: + - somatic + - wgs + - wxs + - panel + - vcf + - structural variants + - small indels +tools: + - manta: + description: Structural variant and indel caller for mapped sequencing data + homepage: https://github.com/Illumina/manta + documentation: https://github.com/Illumina/manta/blob/v1.6.0/docs/userGuide/README.md + tool_dev_url: https://github.com/Illumina/manta + doi: "10.1093/bioinformatics/btv710" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cram: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - crai: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" + - fasta: + type: file + description: Genome reference FASTA file + pattern: "*.{fa,fasta}" + - fai: + type: file + description: Genome reference FASTA index file + pattern: "*.{fa.fai,fasta.fai}" + - target_bed: + type: file + description: BED file containing target regions for variant calling + pattern: "*.{bed}" + - target_bed_tbi: + type: file + description: Index for BED file containing target regions for variant calling + pattern: "*.{bed.tbi}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + + - candidate_small_indels_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - candidate_small_indels_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - candidate_sv_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - candidate_sv_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - tumor_sv_vcf: + type: file + description: Gzipped VCF file containing variants + pattern: "*.{vcf.gz}" + - tumor_sv_vcf_tbi: + type: file + description: Index for gzipped VCF file containing variants + pattern: "*.{vcf.gz.tbi}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@maxulysse" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 813d8546..d1e47803 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -682,10 +682,18 @@ maltextract: - modules/maltextract/** - tests/modules/maltextract/** +manta/germline: + - modules/manta/germline/** + - tests/modules/manta/germline/** + manta/somatic: - modules/manta/somatic/** - tests/modules/manta/somatic/** +manta/tumoronly: + - modules/manta/tumoronly/** + - tests/modules/manta/tumoronly/** + mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** diff --git a/tests/modules/manta/germline/main.nf b/tests/modules/manta/germline/main.nf new file mode 100644 index 00000000..df996464 --- /dev/null +++ b/tests/modules/manta/germline/main.nf @@ -0,0 +1,35 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' addParams( options: [:] ) + +workflow test_manta_germline { + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + ] + + fasta = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = [] + bed_tbi = [] + + MANTA_GERMLINE ( input, fasta, fai, bed, bed_tbi ) +} + +workflow test_manta_germline_target_bed { + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + + MANTA_GERMLINE ( input, fasta, fai, bed, bed_tbi ) +} diff --git a/tests/modules/manta/germline/test.yml b/tests/modules/manta/germline/test.yml new file mode 100644 index 00000000..b4086d76 --- /dev/null +++ b/tests/modules/manta/germline/test.yml @@ -0,0 +1,24 @@ +- name: manta germline + command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline -c tests/config/nextflow.config + tags: + - manta + - manta/germline + files: + - path: output/manta/test.candidate_small_indels.vcf.gz + - path: output/manta/test.candidate_small_indels.vcf.gz.tbi + - path: output/manta/test.candidate_sv.vcf.gz + - path: output/manta/test.candidate_sv.vcf.gz.tbi + - path: output/manta/test.diploid_sv.vcf.gz + - path: output/manta/test.diploid_sv.vcf.gz.tbi +- name: manta germline target bed + command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline_target_bed -c tests/config/nextflow.config + tags: + - manta + - manta/germline + files: + - path: output/manta/test.candidate_small_indels.vcf.gz + - path: output/manta/test.candidate_small_indels.vcf.gz.tbi + - path: output/manta/test.candidate_sv.vcf.gz + - path: output/manta/test.candidate_sv.vcf.gz.tbi + - path: output/manta/test.diploid_sv.vcf.gz + - path: output/manta/test.diploid_sv.vcf.gz.tbi diff --git a/tests/modules/manta/tumoronly/main.nf b/tests/modules/manta/tumoronly/main.nf new file mode 100644 index 00000000..436ab781 --- /dev/null +++ b/tests/modules/manta/tumoronly/main.nf @@ -0,0 +1,35 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MANTA_TUMORONLY } from '../../../../modules/manta/tumoronly/main.nf' addParams( options: [:] ) + +workflow test_manta_tumoronly { + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = [] + bed_tbi = [] + + MANTA_TUMORONLY ( input, fasta, fai, bed, bed_tbi ) +} + +workflow test_manta_tumoronly_target_bed { + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + 
file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + + MANTA_TUMORONLY ( input, fasta, fai, bed, bed_tbi ) +} diff --git a/tests/modules/manta/tumoronly/test.yml b/tests/modules/manta/tumoronly/test.yml new file mode 100644 index 00000000..13f2cde1 --- /dev/null +++ b/tests/modules/manta/tumoronly/test.yml @@ -0,0 +1,24 @@ +- name: manta tumoronly + command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly -c tests/config/nextflow.config + tags: + - manta + - manta/tumoronly + files: + - path: output/manta/test.candidate_small_indels.vcf.gz + - path: output/manta/test.candidate_small_indels.vcf.gz.tbi + - path: output/manta/test.candidate_sv.vcf.gz + - path: output/manta/test.candidate_sv.vcf.gz.tbi + - path: output/manta/test.tumor_sv.vcf.gz + - path: output/manta/test.tumor_sv.vcf.gz.tbi +- name: manta tumoronly target bed + command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly_target_bed -c tests/config/nextflow.config + tags: + - manta + - manta/tumoronly + files: + - path: output/manta/test.candidate_small_indels.vcf.gz + - path: output/manta/test.candidate_small_indels.vcf.gz.tbi + - path: output/manta/test.candidate_sv.vcf.gz + - path: output/manta/test.candidate_sv.vcf.gz.tbi + - path: output/manta/test.tumor_sv.vcf.gz + - path: output/manta/test.tumor_sv.vcf.gz.tbi From a0bc08732c09fcb097b84c96f1a5d7eb507e9bf6 Mon Sep 17 00:00:00 2001 From: Francesco L <53608000+lescai@users.noreply.github.com> Date: Wed, 27 Oct 2021 19:06:06 +0200 Subject: [PATCH 172/314] Rewritten module fgbio/fastqtobam (#916) * added template for fastqtobam * porting old code into new template * update with missing getprocessname function * test completed - updating all * fixed linting issues * improved reading Co-authored-by: FriederikeHanssen Co-authored-by: FriederikeHanssen --- modules/fgbio/fastqtobam/functions.nf | 78 +++++++++++++++++++++++++ modules/fgbio/fastqtobam/main.nf | 51 ++++++++++++++++ modules/fgbio/fastqtobam/meta.yml | 47 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/fgbio/fastqtobam/main.nf | 16 +++++ tests/modules/fgbio/fastqtobam/test.yml | 10 ++++ 6 files changed, 206 insertions(+) create mode 100644 modules/fgbio/fastqtobam/functions.nf create mode 100644 modules/fgbio/fastqtobam/main.nf create mode 100644 modules/fgbio/fastqtobam/meta.yml create mode 100644 tests/modules/fgbio/fastqtobam/main.nf create mode 100644 tests/modules/fgbio/fastqtobam/test.yml diff --git a/modules/fgbio/fastqtobam/functions.nf b/modules/fgbio/fastqtobam/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/fgbio/fastqtobam/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return 
task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/fgbio/fastqtobam/main.nf b/modules/fgbio/fastqtobam/main.nf new file mode 100644 index 00000000..68a85508 --- /dev/null +++ b/modules/fgbio/fastqtobam/main.nf @@ -0,0 +1,51 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FGBIO_FASTQTOBAM { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::fgbio=1.4.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0" + } else { + container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0" + } + + input: + tuple val(meta), path(reads) + val(read_structure) + + output: + tuple val(meta), path("*_umi_converted.bam"), emit: umibam + path "versions.yml" , emit: version + + script: + def software = getSoftwareName(task.process) + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + + """ + mkdir tmpFolder + + fgbio \\ + --tmp-dir=${PWD}/tmpFolder \\ + FastqToBam \\ + -i $reads \\ + -o "${prefix}_umi_converted.bam" \\ + --read-structures $read_structure \\ + --sample $meta.id \\ + --library $meta.id \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + END_VERSIONS + """ +} diff --git a/modules/fgbio/fastqtobam/meta.yml b/modules/fgbio/fastqtobam/meta.yml new file mode 100644 index 00000000..e356d315 --- /dev/null +++ b/modules/fgbio/fastqtobam/meta.yml @@ -0,0 +1,47 @@ +name: fgbio_fastqtobam +description: | + Using the FGBIO tools, converts FASTQ files sequenced with UMIs into BAM files, moving the UMI barcode into the RX field of the BAM file +keywords: + - fastqtobam + - fgbio +tools: + - fgbio: + description: A set of tools for working with genomic and high throughput sequencing data, including UMIs + homepage: http://fulcrumgenomics.github.io/fgbio/ + documentation: http://fulcrumgenomics.github.io/fgbio/tools/latest/ + tool_dev_url: https://github.com/fulcrumgenomics/fgbio + doi: "" + licence: ['MIT'] + +input: + - reads: + type: file + description: pair of reads to be converted into BAM file + pattern: "*.{fastq.gz}" + + - read_structure: + type: string + description: | + A read structure should always be provided for each of the fastq files. + If single end, the string will contain only one structure (i.e. "2M11S+T"), if paired-end the string + will contain two structures separated by a blank space (i.e. "2M11S+T 2M11S+T"). + If the read does not contain any UMI, the structure will be +T (i.e. only template of any length). + https://github.com/fulcrumgenomics/fgbio/wiki/Read-Structures + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.yml}" + - umibam: + type: file + description: Converted, unsorted BAM file with RX tag reporting UMI sequence (if any) + pattern: "*.{bam}" + +authors: + - "@lescai" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d1e47803..524027a4 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -378,6 +378,10 @@ fgbio/callmolecularconsensusreads: - modules/fgbio/callmolecularconsensusreads/** - tests/modules/fgbio/callmolecularconsensusreads/** +fgbio/fastqtobam: + - modules/fgbio/fastqtobam/** + - tests/modules/fgbio/fastqtobam/** + fgbio/sortbam: - modules/fgbio/sortbam/** - tests/modules/fgbio/sortbam/** diff --git a/tests/modules/fgbio/fastqtobam/main.nf b/tests/modules/fgbio/fastqtobam/main.nf new file mode 100644 index 00000000..ce2f7efc --- /dev/null +++ b/tests/modules/fgbio/fastqtobam/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 +params.read_structure = "+T 12M11S+T" + +include { FGBIO_FASTQTOBAM } from '../../../../modules/fgbio/fastqtobam/main.nf' addParams( options: [:] ) + +workflow test_fgbio_fastqtobam { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) ] + ] + + FGBIO_FASTQTOBAM ( input, "${params.read_structure}" ) +} diff --git a/tests/modules/fgbio/fastqtobam/test.yml b/tests/modules/fgbio/fastqtobam/test.yml new file mode 100644 index 00000000..6f2554e9 --- /dev/null +++ b/tests/modules/fgbio/fastqtobam/test.yml @@ -0,0 +1,10 @@ +- name: fgbio fastqtobam test_fgbio_fastqtobam + command: nextflow run tests/modules/fgbio/fastqtobam -entry test_fgbio_fastqtobam -c tests/config/nextflow.config + tags: + - fgbio/fastqtobam + - fgbio + files: + - path: output/fgbio/test_umi_converted.bam + md5sum: 9510735554e5eff29244077a72075fb6 + - path: output/fgbio/versions.yml + md5sum: 524815093b96759060d0d800fc6a3f25 From e27553b989e1de428f9f0fa3dd8103590995a9a4 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Thu, 28 Oct 2021 12:10:21 +0200 Subject: [PATCH 173/314] Add module: `dedup` (#907) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Add dedup (tests and version not working) * Fix dedup and tests Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry --- modules/dedup/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/dedup/main.nf | 47 ++++++++++++++++++++ modules/dedup/meta.yml | 60 +++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/dedup/main.nf | 13 ++++++ tests/modules/dedup/test.yml | 13 ++++++ 6 files changed, 215 insertions(+) create mode 100644 modules/dedup/functions.nf create mode 100644 modules/dedup/main.nf create mode 100644 modules/dedup/meta.yml create mode 100644 tests/modules/dedup/main.nf create mode 100644 tests/modules/dedup/test.yml diff --git a/modules/dedup/functions.nf b/modules/dedup/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/dedup/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/dedup/main.nf b/modules/dedup/main.nf new file mode 100644 index 00000000..62d720f6 --- /dev/null +++ b/modules/dedup/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DEDUP { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::dedup=0.12.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/dedup:0.12.8--hdfd78af_1" + } else { + container "quay.io/biocontainers/dedup:0.12.8--hdfd78af_1" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*_rmdup.bam"), emit: bam // _rmdup is hardcoded output from dedup + tuple val(meta), path("*.json") , emit: json + tuple val(meta), path("*.hist") , emit: hist + tuple val(meta), path("*log") , emit: log + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + dedup \\ + -Xmx${task.memory.toGiga()}g \\ + -i $bam \\ + -o . \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(dedup --version 2>&1) | tail -n 1 | sed 's/.* v//') + + END_VERSIONS + """ +} diff --git a/modules/dedup/meta.yml b/modules/dedup/meta.yml new file mode 100644 index 00000000..0ddd648f --- /dev/null +++ b/modules/dedup/meta.yml @@ -0,0 +1,60 @@ +name: dedup +description: DeDup is a tool for read deduplication in paired-end read merging (e.g. for ancient DNA experiments). +keywords: + - dedup + - deduplication + - pcr duplicates + - ancient DNA + - paired-end + - bam +tools: + - dedup: + description: DeDup is a tool for read deduplication in paired-end read merging (e.g. for ancient DNA experiments). + homepage: https://github.com/apeltzer/DeDup + documentation: https://dedup.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/apeltzer/DeDup + doi: "10.1186/s13059-016-0918-z" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/SAM file + pattern: "*.{bam,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Deduplicated BAM file + pattern: "*_rmdup.bam" + - json: + type: file + description: JSON file for MultiQC + pattern: "*.json" + - hist: + type: file + description: Histogram data of amount of deduplication + pattern: "*.hist" + - log: + type: file + description: Dedup log information + pattern: "*log" + + + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 524027a4..8577b15f 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -294,6 +294,10 @@ damageprofiler: - modules/damageprofiler/** - tests/modules/damageprofiler/** +dedup: + - modules/dedup/** + - tests/modules/dedup/** + deeptools/computematrix: - modules/deeptools/computematrix/** - tests/modules/deeptools/computematrix/** diff --git a/tests/modules/dedup/main.nf b/tests/modules/dedup/main.nf new file mode 100644 index 00000000..37e8e5c2 --- /dev/null +++ b/tests/modules/dedup/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DEDUP } from '../../../modules/dedup/main.nf' addParams( options: [args: "-m"] ) + +workflow test_dedup { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + + DEDUP ( input ) +} diff --git a/tests/modules/dedup/test.yml b/tests/modules/dedup/test.yml new file mode 100644 index 00000000..b35cfafd --- /dev/null +++ b/tests/modules/dedup/test.yml @@ -0,0 +1,13 @@ +- name: dedup test_dedup + command: nextflow run tests/modules/dedup -entry test_dedup -c tests/config/nextflow.config + tags: + - dedup + files: + - path: output/dedup/test.paired_end.dedup.json + md5sum: 2def0b54aba1fafa21b274f260de1b6f + - path: output/dedup/test.paired_end.hist + md5sum: df3492273a1db0d8152e35d9d5e38aa6 + - path: output/dedup/test.paired_end.log + md5sum: 4b8855bd63b2f4b37da4cfb17e61fb00 + - path: output/dedup/test.paired_end_rmdup.bam + md5sum: 8b0408fe3e258989095303a47e5b5061 From 263bbe56d2fde83d8d2f1d14f51b1763e2fda8a8 Mon Sep 17 00:00:00 2001 From: louperelo <44900284+louperelo@users.noreply.github.com> Date: Thu, 28 Oct 2021 12:53:21 +0200 Subject: [PATCH 174/314] add new module samtools/depth (#950) * add new module samtools_depth * fixed main.nf for samtools/depth * Apply suggestions from code review Co-authored-by: James A. Fellows Yates Co-authored-by: James A. 
Fellows Yates --- modules/samtools/depth/functions.nf | 78 +++++++++++++++++++++++++++ modules/samtools/depth/main.nf | 43 +++++++++++++++ modules/samtools/depth/meta.yml | 44 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/samtools/depth/main.nf | 13 +++++ tests/modules/samtools/depth/test.yml | 8 +++ 6 files changed, 190 insertions(+) create mode 100644 modules/samtools/depth/functions.nf create mode 100644 modules/samtools/depth/main.nf create mode 100644 modules/samtools/depth/meta.yml create mode 100644 tests/modules/samtools/depth/main.nf create mode 100644 tests/modules/samtools/depth/test.yml diff --git a/modules/samtools/depth/functions.nf b/modules/samtools/depth/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/samtools/depth/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/samtools/depth/main.nf b/modules/samtools/depth/main.nf new file mode 100644 index 00000000..9c46b011 --- /dev/null +++ b/modules/samtools/depth/main.nf @@ -0,0 +1,43 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SAMTOOLS_DEPTH { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" + } else { + container "quay.io/biocontainers/samtools:1.14--hb421002_0" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + samtools \\ + depth \\ + $options.args \\ + -o ${prefix}.tsv \\ + $bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/samtools/depth/meta.yml b/modules/samtools/depth/meta.yml new file mode 100644 index 00000000..a46fd332 --- /dev/null +++ b/modules/samtools/depth/meta.yml @@ -0,0 +1,44 @@ +name: samtools_depth +description: Computes the depth at each position or region. +keywords: + - depth + - samtools + - statistics + - coverage +tools: + - samtools: + description: Tools for dealing with SAM, BAM and CRAM files; samtools depth – computes the read depth at each position or region + homepage: http://www.htslib.org + documentation: http://www.htslib.org/doc/samtools-depth.html + tool_dev_url: https://github.com/samtools/samtools + doi: "10.1093/bioinformatics/btp352" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: sorted BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: The output of samtools depth has three columns - the name of the contig or chromosome, the position and the number of reads aligned at that position + pattern: "*.{tsv}" + +authors: + - "@louperelo" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8577b15f..e6913ef3 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -967,6 +967,10 @@ samtools/ampliconclip: - modules/samtools/ampliconclip/** - tests/modules/samtools/ampliconclip/** +samtools/depth: + - modules/samtools/depth/** + - tests/modules/samtools/depth/** + samtools/faidx: - modules/samtools/faidx/** - tests/modules/samtools/faidx/** diff --git a/tests/modules/samtools/depth/main.nf b/tests/modules/samtools/depth/main.nf new file mode 100644 index 00000000..90497534 --- /dev/null +++ b/tests/modules/samtools/depth/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMTOOLS_DEPTH } from '../../../../modules/samtools/depth/main.nf' addParams( options: [:] ) + +workflow test_samtools_depth { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) ] + + SAMTOOLS_DEPTH ( input ) +} diff --git a/tests/modules/samtools/depth/test.yml b/tests/modules/samtools/depth/test.yml new file mode 100644 index 00000000..4d5007c8 --- /dev/null +++ b/tests/modules/samtools/depth/test.yml @@ -0,0 +1,8 @@ +- name: samtools depth + command: nextflow run tests/modules/samtools/depth -entry test_samtools_depth -c tests/config/nextflow.config + tags: + - samtools/depth + - samtools + files: + - path: output/samtools/test.tsv + md5sum: aa27ebf69663ebded553b4d6538219d9 From 0b0f87c2f7d1f290ea65e3e6dbbab38e02e95a69 Mon Sep 17 00:00:00 2001 From: Gisela Gabernet Date: Thu, 28 Oct 2021 13:21:36 +0200 Subject: [PATCH 175/314] add `porechop` module (#914) * add module main porechop * update porechop main * add porechop functions * update meta porechop * add test main porechop * add porechop pytest yml * add porechop test.yml * Update modules/porechop/meta.yml Co-authored-by: Robert A. Petit III * re-add porechop avoid conflict * Update modules/porechop/meta.yml Co-authored-by: Robert A. Petit III * fix prefix suffix Co-authored-by: Robert A. 
Petit III --- modules/porechop/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/porechop/main.nf | 42 ++++++++++++++++++ modules/porechop/meta.yml | 50 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/porechop/main.nf | 13 ++++++ tests/modules/porechop/test.yml | 7 +++ 6 files changed, 194 insertions(+) create mode 100644 modules/porechop/functions.nf create mode 100644 modules/porechop/main.nf create mode 100644 modules/porechop/meta.yml create mode 100644 tests/modules/porechop/main.nf create mode 100644 tests/modules/porechop/test.yml diff --git a/modules/porechop/functions.nf b/modules/porechop/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/porechop/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/porechop/main.nf b/modules/porechop/main.nf new file mode 100644 index 00000000..cf564938 --- /dev/null +++ b/modules/porechop/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PORECHOP { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::porechop=0.2.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/porechop:0.2.4--py39h7cff6ad_2" + } else { + container "quay.io/biocontainers/porechop:0.2.4--py38h8c62d01_2" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("*.fastq.gz") , emit: reads + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + porechop \\ + -i ${reads} \\ + -t ${task.cpus} \\ + ${options.args} \\ + -o ${prefix}.fastq.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( porechop --version ) + END_VERSIONS + """ +} diff --git a/modules/porechop/meta.yml b/modules/porechop/meta.yml new file mode 100644 index 00000000..81399d28 --- /dev/null +++ b/modules/porechop/meta.yml @@ -0,0 +1,50 @@ +name: porechop +description: Adapter removal and demultiplexing of Oxford Nanopore reads +keywords: + - adapter + - nanopore + - demultiplexing +tools: + - porechop: + description: Adapter removal and demultiplexing of Oxford Nanopore reads + homepage: "https://github.com/rrwick/Porechop" + documentation: "https://github.com/rrwick/Porechop" + tool_dev_url: "https://github.com/rrwick/Porechop" + doi: "10.1099/mgen.0.000132" + licence: ["GPL v3"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: fastq/fastq.gz file + pattern: "*.{fastq,fastq.gz,fq,fq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - reads: + type: file + description: Demultiplexed and/or adapter-trimmed fastq.gz file + pattern: "*.{fastq.gz}" + +authors: + - "@ggabernet" + - "@jasmezz" + - "@d4straub" + - "@LaurenceKuhl" + - "@SusiJo" + - "@jonasscheid" + - "@jonoave" + - "@GokceOGUZ" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index e6913ef3..c3aa3c93 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -863,6 +863,10 @@ plink/vcf: - modules/plink/vcf/** - tests/modules/plink/vcf/** +porechop: + - modules/porechop/** + - tests/modules/porechop/** + preseq/lcextrap: - modules/preseq/lcextrap/** - tests/modules/preseq/lcextrap/** diff --git a/tests/modules/porechop/main.nf b/tests/modules/porechop/main.nf new file mode 100644 index 00000000..b6d7bafa --- /dev/null +++ b/tests/modules/porechop/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PORECHOP } from '../../../modules/porechop/main.nf' addParams( options: [args: '', suffix: '_porechop'] ) + +workflow test_porechop { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + + PORECHOP ( input ) +} diff --git a/tests/modules/porechop/test.yml b/tests/modules/porechop/test.yml new file mode 100644 index 00000000..b37a7ec4 --- /dev/null +++ b/tests/modules/porechop/test.yml @@ -0,0 +1,7 @@ +- name: porechop test_porechop + command: nextflow run tests/modules/porechop -entry test_porechop -c tests/config/nextflow.config + tags: + - porechop + files: + - path: output/porechop/test_porechop.fastq.gz + md5sum: 08f314ae9f162c8dcc27e5b513d2064d From d5183a7fec77de467750913c3abcb81cbdf63f07 Mon Sep 17 00:00:00 2001 From: Gisela Gabernet Date: Thu, 28 Oct 2021 13:33:57 +0200 Subject: [PATCH 176/314] new module `cellranger mkref` (#896) * add cellranger mkref module * add cellranger mkref tests * update test yml chksum * fix module linting * fix test yml * fix getprocessname * fix versions typo * fix cellranger test.yml * fix versions.yml * test versions.yml * fix grep version * fix cellranger version * add dockerfile and readme * review container statement * Update modules/cellranger/mkref/meta.yml Co-authored-by: Gregor Sturm * add disclaimers * change location dockerfile Co-authored-by: Gregor Sturm --- modules/cellranger/Dockerfile | 21 +++++ modules/cellranger/mkref/functions.nf | 78 ++++++++++++++++++ modules/cellranger/mkref/main.nf | 40 +++++++++ modules/cellranger/mkref/meta.yml | 39 +++++++++ modules/cellranger/readme.md | 18 ++++ ...t_versions_yml.cpython-39-pytest-6.2.5.pyc | Bin 3558 -> 0 bytes tests/config/pytest_modules.yml | 4 + tests/modules/cellranger/mkref/main.nf | 16 ++++ tests/modules/cellranger/mkref/test.yml | 43 ++++++++++ 9 files changed, 259 insertions(+) create mode 100644 modules/cellranger/Dockerfile create mode 100644 modules/cellranger/mkref/functions.nf create mode 100644 modules/cellranger/mkref/main.nf create mode 100644 modules/cellranger/mkref/meta.yml create mode 100644 modules/cellranger/readme.md delete mode 100644 tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc create mode 100644 tests/modules/cellranger/mkref/main.nf create mode 100644 tests/modules/cellranger/mkref/test.yml diff --git a/modules/cellranger/Dockerfile b/modules/cellranger/Dockerfile new file mode 100644 index 
00000000..aced4233 --- /dev/null +++ b/modules/cellranger/Dockerfile @@ -0,0 +1,21 @@ +FROM continuumio/miniconda3:4.8.2 +LABEL authors="Gisela Gabernet " \ + description="Docker image containing Cell Ranger" +# Disclaimer: this container is not provided nor supported by 10x Genomics. + +# Install procps and clean apt cache +RUN apt-get update \ + && apt-get install -y procps \ + && apt-get clean -y && rm -rf /var/lib/apt/lists/* + +# Copy pre-downloaded cellranger file +ENV CELLRANGER_VER 6.0.2 +COPY cellranger-$CELLRANGER_VER.tar.gz /opt/cellranger-$CELLRANGER_VER.tar.gz + +# Install cellranger +RUN \ + cd /opt && \ + tar -xzvf cellranger-$CELLRANGER_VER.tar.gz && \ + export PATH=/opt/cellranger-$CELLRANGER_VER:$PATH && \ + ln -s /opt/cellranger-$CELLRANGER_VER/cellranger /usr/bin/cellranger && \ + rm -rf /opt/cellranger-$CELLRANGER_VER.tar.gz diff --git a/modules/cellranger/mkref/functions.nf b/modules/cellranger/mkref/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cellranger/mkref/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cellranger/mkref/main.nf b/modules/cellranger/mkref/main.nf new file mode 100644 index 00000000..22ad66ba --- /dev/null +++ b/modules/cellranger/mkref/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CELLRANGER_MKREF { + tag 'mkref' + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." + } + container "nfcore/cellranger:6.0.2" + + input: + path fasta + path gtf + val(reference_name) + + output: + path "versions.yml" , emit: versions + path "${reference_name}", emit: reference + + script: + """ + cellranger mkref \\ + --genome=${reference_name} \\ + --fasta=${fasta} \\ + --genes=${gtf} + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/cellranger/mkref/meta.yml b/modules/cellranger/mkref/meta.yml new file mode 100644 index 00000000..9b849af7 --- /dev/null +++ b/modules/cellranger/mkref/meta.yml @@ -0,0 +1,39 @@ +name: cellranger_mkref +description: Module to build the reference needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkref command. +keywords: + - reference + - mkref + - index +tools: + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. 
+ homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA + +input: + - fasta: + type: file + description: fasta genome file + pattern: "*.{fasta,fa}" + - gtf: + type: file + description: gtf transcriptome file + pattern: "*.gtf" + - reference_name: + type: val + description: name to give the reference folder + pattern: str + +output: + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + - reference: + type: folder + description: Folder containing all the reference indices needed by Cell Ranger +authors: + - "@ggabernet" diff --git a/modules/cellranger/readme.md b/modules/cellranger/readme.md new file mode 100644 index 00000000..ed8ccb73 --- /dev/null +++ b/modules/cellranger/readme.md @@ -0,0 +1,18 @@ +# Updating the docker container and making a new module release + +Cell Ranger is a commercial tool by 10X Genomics. The container provided for the cellranger nf-core module is not provided nor supported by 10x Genomics. Updating the Cell Ranger version in the container and pushing the update to Dockerhub needs to be done manually. + +1. Navigate to the [Cell Ranger download page](https://support.10xgenomics.com/single-cell-gene-expression/software/downloads/latest) and download the tar ball of the desired Cell Ranger version with `curl` or `wget`. Place this file in the same folder where the Dockerfile lies. + +2. Edit the Dockerfile: update the Cell Ranger version in this line: + + ```bash + ENV CELLRANGER_VER + ``` + +3. Create the container: + + ```bash + docker build . -t nfcore/cellranger: + docker push nfcore/cellranger: + ``` diff --git a/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc b/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc deleted file mode 100644 index 33acb8369a1bc62b5e66e1ed80e2247dd0e2759f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3558 zcmaJ@TaVku73T0FQCdkWt=50fJ2%8xAU@;c_*j71tCg z&y04xD)|!Up)V=?+}g;h4+RSJp}(O&gXmj<0(mOXry?kto--7;v;&k9hjTgS%y%wx z=6LmbRfFfZUyVJ_*0jIjp!gUtco$v~KtnZ30msOtx zcHdT1m3~EitNki`jbLe9>(_wBq}G>Ozs~I4#_lq!Kh=Jt(Go3xsXZnACMEZ^j{Obn zsBLygYW~cVBlwmmqb!sIoX;N%I{4zG*n~!Ek~}55`k^t`5-rvaO-kk@Au+k9{qjxbpJ20feVU+IYv8?e?$%-E#Hn)VR1dVK5g8EOIwYxea|m~^}#a6)g)gcA+b zU_auIhC#G1oc)nM7&#+vk2#(rnULLsap3r&BS%b)g+qPL1~TFY-D=f==RxGq$Pv*c z2j-_-_K-ar?v6pkA zwLXeG%6Mub?#)hx>tLPKc3l+ex@ieSf{?V#nTUctmYO?3w4Is<-Z)6j0GQJXXC8GW zdm_{7d!AsK#EzAkBgR?(*yI5Bb8}lL$s#4g{*sypm_vUjwe$5qYPCkha|&Rsf(} z1YxX+8Z?^KAXQ@PtHdU}4dVg9*~N1VzDw}F53l&hz{rUq`Y>j8$)S!>L-jRH#SxjE zi6MGp@;iv!7shEV(IJLRh%n<*Equ#_SUUKeK->^bmSmY)u@01_7sgYF#V^P&v`}By zXc;3cHugwj#g<5=eORG(Yz+ym;QUaZn=n_6P1w0Amkw)lZBG-whq-!e%nARKT7_Py z(<)tht{>Nu63jJd4LIs2G}?$uK7rXXps){@6Z%*2w-!xl_&DfsB3@E@)E0VxerQLfhXuM+lZtH1)!05f zHV2=9CV(ieqeijyOc(DdX(vJ28b~Xm|0k^E=>Lrvj?kYvLa&RHNEO!sz0S`g74`xB zDb)w`^z?H>SwFLLDaQx<*AI`+HOMFb`b@Vuec(Dr&mU14LPx528K?|iQ}Un>RGHS`SvA)N*>5~AgT|{1kF4ezgEyM+w%}b! 
z+RyaIn^c3m`Q~OyT#n~$R*t4}GL^kqy_X+fOp&u~Oohc~tGK-7gpqWH(G-&3BAI4# zirKWtjs1-$6G*fS)1@5UcD82!*vc|)k?_ID7tX*7vz$Gi2GXAd3{tTtAhX<7Nwr9S z(Rr}QYCg>S(f z0T~~yAf(5ovk%!ZyHuhmyN$jtv|roUyyrgL_~ibFA8+38kZ()K?}r^SJN|WNX3Dpf5I(`sb z2IGu9iT?QM#wQ#1K7N1Jtlss6V4w|de*?CMoo;-CaROlVJ9hx+`T|+?SBs8wM^UeL z#08uI9Xi#-xs%P_L>U1KQ#%LMF;VC`KLJAdNoa6*bJka0j3$=@2G=d3%o)^$J)R1w zE>;ipV^6$+gey4;nFDtuKe(+7KjG1U39umg%ENnoAe~3-;1#uQWoq4;y$xGJcqq{j z44!lX2r{X}E;e`A*hdR>XWf@f1D^T+`ll@s4dp)E;YXKZ$KbD_&PR&RN}qp5-|k+V zwI62prV~!b+l<4p;?C@J2CNWV-eSG$%=k-au7K?`IM`eG<7z$!o&UM03mzYgs)=&d z^^UltdjBXLU1@A7C!o;5_1K`04o5_5YN<)4!|Mzp$$0FGiF6 z8dmuR&~{p>ojaWOU?N>}vuNpNn81Bq&h|MtCaoMzVR5bI_$A~i Date: Thu, 28 Oct 2021 14:24:52 +0200 Subject: [PATCH 177/314] Updating test file for optitype (#782) * Provide an exisiting bam file for optitype * Update main.nf Attempt at fixing this with new testing data * Trying slightly different approach * Mini fixes, not sure whats wrong here * Add bam file with NM tags in all reads for optitype Co-authored-by: Alexander Peltzer Co-authored-by: Alexander Peltzer --- tests/config/test_data.config | 1 + tests/modules/optitype/main.nf | 4 ++-- tests/modules/optitype/test.yml | 4 +++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 858e7737..e8729b9b 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -135,6 +135,7 @@ params { test_paired_end_umi_histogram_txt = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_histogram.txt" test_paired_end_umi_unsorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_unsorted.bam" test_paired_end_umi_unsorted_tagged_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.unsorted_tagged.bam" + test_paired_end_hla = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/example_hla_pe.bam" test2_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam" test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam.bai" diff --git a/tests/modules/optitype/main.nf b/tests/modules/optitype/main.nf index 7d740473..c27a5c99 100644 --- a/tests/modules/optitype/main.nf +++ b/tests/modules/optitype/main.nf @@ -6,8 +6,8 @@ include { OPTITYPE } from '../../../modules/optitype/main.nf' addParams( options workflow test_optitype { input = [ [ id:'test', seq_type:'dna' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_bam'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_hla'], checkIfExists: true) ] - + OPTITYPE ( input ) } diff --git a/tests/modules/optitype/test.yml b/tests/modules/optitype/test.yml index 5ee5a067..41f35988 100644 --- a/tests/modules/optitype/test.yml +++ b/tests/modules/optitype/test.yml @@ -3,5 +3,7 @@ tags: - optitype files: - - path: output/optitype/test/test_result.tsv - path: output/optitype/test/test_coverage_plot.pdf + - path: output/optitype/test/test_result.tsv + contains: + - '1446' From 94025957118a2e1a3c72a3d8d6971777327e0315 Mon Sep 17 00:00:00 2001 From: emnilsson Date: Thu, 28 Oct 2021 16:53:33 +0200 Subject: [PATCH 178/314] New module seqtk/mergepe (#951) * First step into creating a seqtk/mergepe module to interleave fastQ input * First rewrite of main.nf to make the module perform the desired (interleave/merge of pe reads) task * Modifications to test the new seqtk/mergepe module. 
* Improving the seqtk/mergepe module to output single end reads as well, and making sure tests work * Modified so that gzip uses -n and that single read entries are symlinked with ln -s instead of copied, therefore updated test.yml as well. * Fix trailing whitespaces Co-authored-by: Daniel Lundin --- modules/seqtk/mergepe/functions.nf | 78 ++++++++++++++++++++++++++++ modules/seqtk/mergepe/main.nf | 53 +++++++++++++++++++ modules/seqtk/mergepe/meta.yml | 40 ++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/seqtk/mergepe/main.nf | 31 +++++++++++ tests/modules/seqtk/mergepe/test.yml | 17 ++++++ 6 files changed, 223 insertions(+) create mode 100644 modules/seqtk/mergepe/functions.nf create mode 100644 modules/seqtk/mergepe/main.nf create mode 100644 modules/seqtk/mergepe/meta.yml create mode 100644 tests/modules/seqtk/mergepe/main.nf create mode 100644 tests/modules/seqtk/mergepe/test.yml diff --git a/modules/seqtk/mergepe/functions.nf b/modules/seqtk/mergepe/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/seqtk/mergepe/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/seqtk/mergepe/main.nf b/modules/seqtk/mergepe/main.nf new file mode 100644 index 00000000..fb8eb382 --- /dev/null +++ b/modules/seqtk/mergepe/main.nf @@ -0,0 +1,53 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SEQTK_MERGEPE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" + } else { + container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("*.fastq.gz"), emit: reads + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + if (meta.single_end) { + """ + ln -s ${reads} ${prefix}.fastq.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS + """ + } else { + """ + seqtk \\ + mergepe \\ + $options.args \\ + ${reads} \\ + | gzip -n >> ${prefix}.fastq.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS + """ + } +} diff --git a/modules/seqtk/mergepe/meta.yml b/modules/seqtk/mergepe/meta.yml new file mode 100644 index 00000000..a342f60b --- /dev/null +++ b/modules/seqtk/mergepe/meta.yml @@ -0,0 +1,40 @@ +name: seqtk_mergepe +description: Interleave pair-end reads from FastQ files +keywords: + - interleave +tools: + - seqtk: + description: Seqtk is a fast and lightweight tool for processing sequences in the FASTA or FASTQ format. Seqtk mergepe command merges pair-end reads into one interleaved file. + homepage: https://github.com/lh3/seqtk + documentation: https://docs.csc.fi/apps/seqtk/ + tool_dev_url: https://github.com/lh3/seqtk + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: List of input FastQ files of size 1 and 2 for single-end and paired-end data,respectively. + pattern: "*.{fastq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - reads: + type: file + description: If single-end reads, the output is the same as the input, 1 FastQ file for each read. 
If pair-end reads, the read pairs will be interleaved and output as 1 FastQ file for each read pair. + pattern: "*.{fastq.gz}" + +authors: + - "@emnilsson" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index c613ed80..5b371fe5 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1023,6 +1023,10 @@ seacr/callpeak: - modules/seacr/callpeak/** - tests/modules/seacr/callpeak/** +seqtk/mergepe: + - modules/seqtk/mergepe/** + - tests/modules/seqtk/mergepe/** + seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** diff --git a/tests/modules/seqtk/mergepe/main.nf b/tests/modules/seqtk/mergepe/main.nf new file mode 100644 index 00000000..13654dc6 --- /dev/null +++ b/tests/modules/seqtk/mergepe/main.nf @@ -0,0 +1,31 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [ 'suffix':'.processed' ] ) + +// +// Test with single-end data +// + +workflow test_seqtk_mergepe_single_end { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + + SEQTK_MERGEPE ( input ) +} + +// +// Test with paired-end data +// + +workflow test_seqtk_mergepe_paired_end { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + + SEQTK_MERGEPE ( input ) +} diff --git a/tests/modules/seqtk/mergepe/test.yml b/tests/modules/seqtk/mergepe/test.yml new file mode 100644 index 00000000..8ae95354 --- /dev/null +++ b/tests/modules/seqtk/mergepe/test.yml @@ -0,0 +1,17 @@ +- name: seqtk mergepe test_seqtk_mergepe_single_end + command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_single_end -c tests/config/nextflow.config + tags: + - seqtk/mergepe + - seqtk + files: + - path: output/seqtk/test.processed.fastq.gz + md5sum: e325ef7deb4023447a1f074e285761af + +- name: seqtk mergepe test_seqtk_mergepe_paired_end + command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_paired_end -c tests/config/nextflow.config + tags: + - seqtk/mergepe + - seqtk + files: + - path: output/seqtk/test.processed.fastq.gz + md5sum: 3f094ef62d9bfe06aa25174a06bc7d04 From 1662201102fc574cbf6a964be1710928bc7d4b1d Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Thu, 28 Oct 2021 17:49:27 +0200 Subject: [PATCH 179/314] Add gatk4/estimatelibrarycomplexity (#965) * Add gatk4/estimatelibrarycomplxity * forgot to add yml * Fix placeholder --- .../estimatelibrarycomplexity/functions.nf | 78 +++++++++++++++++++ .../gatk4/estimatelibrarycomplexity/main.nf | 54 +++++++++++++ .../gatk4/estimatelibrarycomplexity/meta.yml | 56 +++++++++++++ tests/config/pytest_modules.yml | 4 + .../gatk4/estimatelibrarycomplexity/main.nf | 18 +++++ .../gatk4/estimatelibrarycomplexity/test.yml | 7 ++ 6 files changed, 217 insertions(+) create mode 100644 modules/gatk4/estimatelibrarycomplexity/functions.nf create mode 100644 modules/gatk4/estimatelibrarycomplexity/main.nf create mode 100644 modules/gatk4/estimatelibrarycomplexity/meta.yml create mode 100644 tests/modules/gatk4/estimatelibrarycomplexity/main.nf create mode 100644 tests/modules/gatk4/estimatelibrarycomplexity/test.yml diff --git a/modules/gatk4/estimatelibrarycomplexity/functions.nf 
b/modules/gatk4/estimatelibrarycomplexity/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/estimatelibrarycomplexity/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf new file mode 100644 index 00000000..4cea7086 --- /dev/null +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -0,0 +1,54 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_ESTIMATELIBRARYCOMPLEXITY { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.2.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.2.0--hdfd78af_1" + } else { + container "quay.io/biocontainers/gatk4:4.2.2.0--hdfd78af_1" + } + + input: + tuple val(meta), path(cram) + path(fasta) + path(fai) + path(dict) + + output: + tuple val(meta), path('*.metrics'), emit: metrics + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def crams = cram.collect(){ x -> "-I ".concat(x.toString()) }.join(" ") + + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK EstimateLibraryComplexity] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + gatk EstimateLibraryComplexity \ + ${crams} \ + -O ${prefix}.metrics \ + --REFERENCE_SEQUENCE ${fasta} \ + --VALIDATION_STRINGENCY SILENT \ + --TMP_DIR . $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/estimatelibrarycomplexity/meta.yml b/modules/gatk4/estimatelibrarycomplexity/meta.yml new file mode 100644 index 00000000..94c1817d --- /dev/null +++ b/modules/gatk4/estimatelibrarycomplexity/meta.yml @@ -0,0 +1,56 @@ +name: gatk4_estimatelibrarycomplexity +description: Estimates the numbers of unique molecules in a sequencing library. +keywords: + - gatk4 + - gatk4_estimatelibrarycomplexity + - duplication_metrics + - reporting +tools: + - gatk4: + description: Genome Analysis Toolkit (GATK4) + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us + tool_dev_url: https://github.com/broadinstitute/gatk + doi: "10.1158/1538-7445.AM2017-3590" + licence: ['Apache-2.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cram: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - metrics: + type: file + description: File containing metrics on the input files + pattern: "*.{metrics}" + +authors: + - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 5b371fe5..4782a9ee 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -430,6 +430,10 @@ gatk4/createsomaticpanelofnormals: - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** +gatk4/estimatelibrarycomplexity: + - modules/gatk4/estimatelibrarycomplexity/** + - tests/modules/gatk4/estimatelibrarycomplexity/** + gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/main.nf b/tests/modules/gatk4/estimatelibrarycomplexity/main.nf new file mode 100644 index 00000000..72772318 --- /dev/null +++ b/tests/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../../modules/gatk4/estimatelibrarycomplexity/main.nf' addParams( options: [:] ) + +workflow test_gatk4_estimatelibrarycomplexity { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_ESTIMATELIBRARYCOMPLEXITY ( input, fasta, fai, dict ) +} diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml new file mode 100644 index 00000000..ca949c00 --- /dev/null +++ b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml @@ -0,0 +1,7 @@ +- name: gatk4 estimatelibrarycomplexity test_gatk4_estimatelibrarycomplexity + command: nextflow run tests/modules/gatk4/estimatelibrarycomplexity -entry test_gatk4_estimatelibrarycomplexity -c tests/config/nextflow.config + tags: + - gatk4/estimatelibrarycomplexity + - gatk4 + files: + - path: output/gatk4/test.metrics From e8b33e6eb1be2a03abdd2c06068f9a5cebc02bb4 Mon Sep 17 00:00:00 2001 From: Chris Cheshire Date: Thu, 28 Oct 2021 17:50:25 +0100 Subject: [PATCH 180/314] bedtools/genomecov updated to allow for providing a per-sample scale factor (#799) * hifiasm copied from fastqc * hifiasm tests init from fastqc * meta.yml init; test.yml and main.nf for printing version * Add hifiasm version printing * Removed spaced on an empty line * Reverted hifiasm from main * Added genomecov scale module * Updated tagging * Removed extra module - began merging * Removed extra module tests * Updated genomecov to take a scale value * Updated line endings * Removed redundant test * Update tests/modules/bedtools/genomecov/main.nf Co-authored-by: Harshil Patel * Added checking for existing -bg arg * Update modules/bedtools/genomecov/main.nf Co-authored-by: Harshil Patel * Update modules/bedtools/genomecov/main.nf Co-authored-by: Harshil Patel Co-authored-by: Sviatoslav Sidorov Co-authored-by: Svyatoslav Sidorov Co-authored-by: Harshil Patel --- modules/bedtools/genomecov/main.nf | 16 +++++++--- 
modules/bedtools/genomecov/meta.yml | 4 +++ tests/modules/bedtools/genomecov/main.nf | 36 ++++++++++++++++++++--- tests/modules/bedtools/genomecov/test.yml | 26 +++++++++++++--- 4 files changed, 70 insertions(+), 12 deletions(-) diff --git a/modules/bedtools/genomecov/main.nf b/modules/bedtools/genomecov/main.nf index 9d014466..52f37f23 100644 --- a/modules/bedtools/genomecov/main.nf +++ b/modules/bedtools/genomecov/main.nf @@ -19,7 +19,7 @@ process BEDTOOLS_GENOMECOV { } input: - tuple val(meta), path(intervals) + tuple val(meta), path(intervals), val(scale) path sizes val extension @@ -28,13 +28,21 @@ process BEDTOOLS_GENOMECOV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args_token = options.args.tokenize() + def args = options.args + args += (scale > 0 && scale != 1) ? " -scale $scale" : "" + + if (!args_token.contains('-bg') && (scale > 0 && scale != 1)) { + args += " -bg" + } + if (intervals.name =~ /\.bam/) { """ bedtools \\ genomecov \\ -ibam $intervals \\ - $options.args \\ + $args \\ > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml @@ -48,7 +56,7 @@ process BEDTOOLS_GENOMECOV { genomecov \\ -i $intervals \\ -g $sizes \\ - $options.args \\ + $args \\ > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml diff --git a/modules/bedtools/genomecov/meta.yml b/modules/bedtools/genomecov/meta.yml index 3deb4d6b..0713e95b 100644 --- a/modules/bedtools/genomecov/meta.yml +++ b/modules/bedtools/genomecov/meta.yml @@ -20,6 +20,9 @@ input: type: file description: BAM/BED/GFF/VCF pattern: "*.{bam|bed|gff|vcf}" + - scale: + type: value + description: Number containing the scale factor for the output. Set to 1 to disable. 
Setting to a value other than 1 will also get the -bg bedgraph output format as this is required for this command switch - sizes: type: file description: Tab-delimited table of chromosome names in the first column and chromosome sizes in the second column @@ -45,3 +48,4 @@ authors: - "@sruthipsuresh" - "@drpatelh" - "@sidorov-si" + - "@chris-cheshire" diff --git a/tests/modules/bedtools/genomecov/main.nf b/tests/modules/bedtools/genomecov/main.nf index 431a42bd..445ed078 100644 --- a/tests/modules/bedtools/genomecov/main.nf +++ b/tests/modules/bedtools/genomecov/main.nf @@ -4,10 +4,37 @@ nextflow.enable.dsl = 2 include { BEDTOOLS_GENOMECOV } from '../../../../modules/bedtools/genomecov/main.nf' addParams( options: [suffix: '_out'] ) -workflow test_bedtools_genomecov { +workflow test_bedtools_genomecov_noscale { input = [ [ id:'test'], - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + 1 + ] + + sizes = [] + extension = 'txt' + + BEDTOOLS_GENOMECOV ( input, sizes, extension ) +} + +workflow test_bedtools_genomecov_nonbam_noscale { + input = [ + [ id:'test'], + file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true), + 1 + ] + + sizes = file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) + extension = 'txt' + + BEDTOOLS_GENOMECOV ( input, sizes, extension ) +} + +workflow test_bedtools_genomecov_scale { + input = [ + [ id:'test'], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + 0.5 ] sizes = file('dummy_chromosome_sizes') @@ -16,10 +43,11 @@ workflow test_bedtools_genomecov { BEDTOOLS_GENOMECOV ( input, sizes, extension ) } -workflow test_bedtools_genomecov_nonbam { +workflow test_bedtools_genomecov_nonbam_scale { input = [ [ id:'test'], - file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) + file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true), + 0.5 ] sizes = file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) diff --git a/tests/modules/bedtools/genomecov/test.yml b/tests/modules/bedtools/genomecov/test.yml index c1f2080a..477e6555 100644 --- a/tests/modules/bedtools/genomecov/test.yml +++ b/tests/modules/bedtools/genomecov/test.yml @@ -1,5 +1,5 @@ -- name: bedtools genomecov test_bedtools_genomecov - command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov -c tests/config/nextflow.config +- name: bedtools genomecov test_bedtools_genomecov_noscale + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_noscale -c tests/config/nextflow.config tags: - bedtools - bedtools/genomecov @@ -7,11 +7,29 @@ - path: output/bedtools/test_out.txt md5sum: 66083198daca6c001d328ba9616e9b53 -- name: bedtools genomecov test_bedtools_genomecov_nonbam - command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam -c tests/config/nextflow.config +- name: bedtools genomecov test_bedtools_genomecov_nonbam_noscale + command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_noscale -c tests/config/nextflow.config tags: - bedtools - bedtools/genomecov files: - path: output/bedtools/test_out.txt md5sum: f47b58840087426e5b643d8dfd155c1f + +- name: bedtools genomecov test_bedtools_genomecov_scale + command: nextflow run ./tests/modules/bedtools/genomecov -entry 
test_bedtools_genomecov_scale -c tests/config/nextflow.config + tags: + - bedtools + - bedtools/genomecov + files: + - path: output/bedtools/test_out.txt + md5sum: 01291b6e1beab72e046653e709eb0e10 + +- name: bedtools genomecov test_bedtools_genomecov_nonbam_scale + command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_scale -c tests/config/nextflow.config + tags: + - bedtools + - bedtools/genomecov + files: + - path: output/bedtools/test_out.txt + md5sum: de3c59c0ea123bcdbbad27bc0a0a601e From 5b1fe84bc69a519930872cb3bb942e30c7dee2b3 Mon Sep 17 00:00:00 2001 From: Sateesh <33637490+sateeshperi@users.noreply.github.com> Date: Fri, 29 Oct 2021 04:01:23 -0400 Subject: [PATCH 181/314] add new assembly scan module (#971) * add new assembly scan module * add newline endings * fix newline ending * add newline at end Co-authored-by: Peri Co-authored-by: Robert A. Petit III Co-authored-by: Gregor Sturm --- modules/assemblyscan/functions.nf | 78 +++++++++++++++++++++++++++++ modules/assemblyscan/main.nf | 38 ++++++++++++++ modules/assemblyscan/meta.yml | 43 ++++++++++++++++ tests/config/pytest_modules.yml | 12 +++-- tests/modules/assemblyscan/main.nf | 13 +++++ tests/modules/assemblyscan/test.yml | 7 +++ 6 files changed, 187 insertions(+), 4 deletions(-) create mode 100644 modules/assemblyscan/functions.nf create mode 100644 modules/assemblyscan/main.nf create mode 100644 modules/assemblyscan/meta.yml create mode 100644 tests/modules/assemblyscan/main.nf create mode 100644 tests/modules/assemblyscan/test.yml diff --git a/modules/assemblyscan/functions.nf b/modules/assemblyscan/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/assemblyscan/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/assemblyscan/main.nf b/modules/assemblyscan/main.nf new file mode 100644 index 00000000..5b82f922 --- /dev/null +++ b/modules/assemblyscan/main.nf @@ -0,0 +1,38 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ASSEMBLYSCAN { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::assembly-scan=0.4.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/assembly-scan:0.4.1--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/assembly-scan:0.4.1--pyhdfd78af_0" + } + + input: + tuple val(meta), path(assembly) + + output: + tuple val(meta), path("*.json"), emit: json + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + assembly-scan $assembly > ${prefix}.json + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( assembly-scan --version 2>&1 | sed 's/^.*assembly-scan //; s/Using.*\$//' ) + END_VERSIONS + """ +} diff --git a/modules/assemblyscan/meta.yml b/modules/assemblyscan/meta.yml new file mode 100644 index 00000000..40ea98b9 --- /dev/null +++ b/modules/assemblyscan/meta.yml @@ -0,0 +1,43 @@ +name: assemblyscan +description: Assembly summary statistics in JSON format +keywords: + - assembly + - statistics +tools: + - assemblyscan: + description: Assembly summary statistics in JSON format + homepage: https://github.com/rpetit3/assembly-scan + documentation: https://github.com/rpetit3/assembly-scan + tool_dev_url: https://github.com/rpetit3/assembly-scan + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - assembly: + type: file + description: FASTA file for a given assembly + pattern: "*.fasta" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: Assembly statistics in JSON format + pattern: "*.json" + +authors: + - "@sateeshperi" + - "@mjcipriano" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4782a9ee..56684f01 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -30,6 +30,10 @@ artic/minion: - modules/artic/minion/** - tests/modules/artic/minion/** +assemblyscan: + - modules/assemblyscan/** + - tests/modules/assemblyscan/** + bamaligncleaner: - modules/bamaligncleaner/** - tests/modules/bamaligncleaner/** @@ -1027,14 +1031,14 @@ seacr/callpeak: - modules/seacr/callpeak/** - tests/modules/seacr/callpeak/** -seqtk/mergepe: - - modules/seqtk/mergepe/** - - tests/modules/seqtk/mergepe/** - seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** +seqtk/mergepe: + - modules/seqtk/mergepe/** + - tests/modules/seqtk/mergepe/** + seqtk/sample: - modules/seqtk/sample/** - tests/modules/seqtk/sample/** diff --git a/tests/modules/assemblyscan/main.nf b/tests/modules/assemblyscan/main.nf new file mode 100644 index 00000000..6f3cbb5e --- /dev/null +++ b/tests/modules/assemblyscan/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ASSEMBLYSCAN } from '../../../modules/assemblyscan/main.nf' addParams( options: [:] ) + +workflow test_assemblyscan { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + ASSEMBLYSCAN ( input ) +} diff --git a/tests/modules/assemblyscan/test.yml b/tests/modules/assemblyscan/test.yml new file mode 100644 index 00000000..0eb4ad66 --- /dev/null +++ b/tests/modules/assemblyscan/test.yml @@ -0,0 +1,7 @@ +- name: assemblyscan test_assemblyscan + command: nextflow run tests/modules/assemblyscan -entry test_assemblyscan -c tests/config/nextflow.config + tags: + - assemblyscan + files: + - path: output/assemblyscan/test.json + md5sum: 9140e3d43f2d676f62e1325ace5dd8bd From 4b8c7ac7bdee34903c23e05697ac4fff4bdf0b04 Mon Sep 17 00:00:00 2001 From: Francesco L <53608000+lescai@users.noreply.github.com> Date: Fri, 29 Oct 2021 10:46:34 +0200 Subject: [PATCH 182/314] Bam2fq (#958) * added template for module * update main * added specific code * wrong variable name in else script * added tests for both split and nosplit * docker test successful - updating yaml * adding echo to version print --- modules/samtools/bam2fq/functions.nf | 78 ++++++++++++++++++++++++++ modules/samtools/bam2fq/main.nf | 64 +++++++++++++++++++++ modules/samtools/bam2fq/meta.yml | 55 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/samtools/bam2fq/main.nf | 24 ++++++++ tests/modules/samtools/bam2fq/test.yml | 23 ++++++++ 6 files changed, 248 insertions(+) create mode 100644 modules/samtools/bam2fq/functions.nf create mode 100644 modules/samtools/bam2fq/main.nf create mode 100644 modules/samtools/bam2fq/meta.yml create mode 100644 tests/modules/samtools/bam2fq/main.nf create mode 100644 tests/modules/samtools/bam2fq/test.yml diff --git a/modules/samtools/bam2fq/functions.nf b/modules/samtools/bam2fq/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/samtools/bam2fq/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name 
using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/samtools/bam2fq/main.nf b/modules/samtools/bam2fq/main.nf new file mode 100644 index 00000000..48e3249c --- /dev/null +++ b/modules/samtools/bam2fq/main.nf @@ -0,0 +1,64 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SAMTOOLS_BAM2FQ { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" + } else { + container "quay.io/biocontainers/samtools:1.14--hb421002_0" + } + + input: + tuple val(meta), path(inputbam) + val(split) + + output: + tuple val(meta), path("*.fq.gz"), emit: reads + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + + if (split){ + """ + samtools \\ + bam2fq \\ + $options.args \\ + -@ $task.cpus \\ + -1 ${prefix}_1.fq.gz \\ + -2 ${prefix}_2.fq.gz \\ + -0 ${prefix}_other.fq.gz \\ + -s ${prefix}_singleton.fq.gz \\ + $inputbam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ + } else { + """ + samtools \\ + bam2fq \\ + $options.args \\ + -@ $task.cpus \\ + $inputbam >${prefix}_interleaved.fq.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ + } + +} diff --git a/modules/samtools/bam2fq/meta.yml b/modules/samtools/bam2fq/meta.yml new file mode 100644 index 00000000..f35701c4 --- /dev/null +++ b/modules/samtools/bam2fq/meta.yml @@ -0,0 +1,55 @@ +name: samtools_bam2fq +description: | + The module uses bam2fq method from samtools to + convert a SAM, BAM or CRAM file to FASTQ format +keywords: + - bam2fq + - samtools + - fastq +tools: + - samtools: + description: Tools for dealing with SAM, BAM and CRAM files + homepage: None + documentation: http://www.htslib.org/doc/1.1/samtools.html + tool_dev_url: None + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - inputbam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - split: + type: boolean + description: | + TRUE/FALSE value to indicate if reads should be separated into + /1, /2 and if present other, or singleton. + Note: choosing TRUE will generate 4 different files. + Choosing FALSE will produce a single file, which will be interleaved in case + the input contains paired reads. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - reads: + type: file + description: | + FASTQ files, which will be either a group of 4 files (read_1, read_2, other and singleton) + or a single interleaved .fq.gz file if the user chooses not to split the reads. 
+ pattern: "*.fq.gz" + +authors: + - "@lescai" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 56684f01..da9de7aa 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -983,6 +983,10 @@ samtools/ampliconclip: - modules/samtools/ampliconclip/** - tests/modules/samtools/ampliconclip/** +samtools/bam2fq: + - modules/samtools/bam2fq/** + - tests/modules/samtools/bam2fq/** + samtools/depth: - modules/samtools/depth/** - tests/modules/samtools/depth/** diff --git a/tests/modules/samtools/bam2fq/main.nf b/tests/modules/samtools/bam2fq/main.nf new file mode 100644 index 00000000..f8614ad0 --- /dev/null +++ b/tests/modules/samtools/bam2fq/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMTOOLS_BAM2FQ } from '../../../../modules/samtools/bam2fq/main.nf' addParams( options: [args: "-T RX"] ) + +workflow test_samtools_bam2fq_nosplit { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_converted_bam'], checkIfExists: true) ] + split = false + + SAMTOOLS_BAM2FQ ( input, split ) +} + + +workflow test_samtools_bam2fq_withsplit { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_converted_bam'], checkIfExists: true) ] + split = true + + SAMTOOLS_BAM2FQ ( input, split ) +} diff --git a/tests/modules/samtools/bam2fq/test.yml b/tests/modules/samtools/bam2fq/test.yml new file mode 100644 index 00000000..ff1762b3 --- /dev/null +++ b/tests/modules/samtools/bam2fq/test.yml @@ -0,0 +1,23 @@ +- name: samtools bam2fq test_samtools_bam2fq_nosplit + command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_nosplit -c tests/config/nextflow.config + tags: + - samtools/bam2fq + - samtools + files: + - path: output/samtools/test_interleaved.fq.gz + md5sum: d733e66d29a4b366bf9df8c42f845256 + +- name: samtools bam2fq test_samtools_bam2fq_withsplit + command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_withsplit -c tests/config/nextflow.config + tags: + - samtools/bam2fq + - samtools + files: + - path: output/samtools/test_1.fq.gz + md5sum: 4522edbe158ec4804765794569f67493 + - path: output/samtools/test_2.fq.gz + md5sum: 7e00ef40d5cfe272b67461381019dcc1 + - path: output/samtools/test_other.fq.gz + md5sum: 709872fc2910431b1e8b7074bfe38c67 + - path: output/samtools/test_singleton.fq.gz + md5sum: 709872fc2910431b1e8b7074bfe38c67 From 71945a5b5f4126593aef76abdf1a2f82aa468566 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Fri, 29 Oct 2021 11:27:56 +0100 Subject: [PATCH 183/314] Mutect2 add mitochondria mode and update tests (#967) * new mitochondria mode added, tests updated to allow for temp fix for test data * add cram test * bam/bam_idx renamed to input and input_index Co-authored-by: GCJMackenzie --- modules/gatk4/mutect2/main.nf | 41 ++++++++++---------- modules/gatk4/mutect2/meta.yml | 23 ++++++++--- tests/modules/gatk4/mutect2/main.nf | 57 ++++++++++++++++++++++++++-- tests/modules/gatk4/mutect2/test.yml | 26 ++++++++++++- 4 files changed, 116 insertions(+), 31 deletions(-) diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 9b3f8b3f..7999eec3 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -19,9 +19,11 @@ process GATK4_MUTECT2 { } input: - tuple val(meta) , path(bam) , path(bai) , 
val(which_norm) - val run_single - val run_pon + tuple val(meta) , path(input) , path(input_index) , val(which_norm) + val run_single + val run_pon + val run_mito + val interval_label path fasta path fastaidx path dict @@ -39,35 +41,34 @@ process GATK4_MUTECT2 { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def inputsList = [] - def normalsList = [] - def inputsCommand = '' - def panelsCommand = '' - def normalsCommand = '' + def panels_command = '' + def normals_command = '' - bam.each() {a -> inputsList.add(" -I " + a ) } - inputsCommand = inputsList.join( ' ') + def inputs_command = '-I ' + input.join( ' -I ') if(run_pon) { - panelsCommand = '' - normalsCommand = '' + panels_command = '' + normals_command = '' } else if(run_single) { - panelsCommand = " --germline-resource $germline_resource --panel-of-normals $panel_of_normals" - normalsCommand = '' + panels_command = " --germline-resource $germline_resource --panel-of-normals $panel_of_normals" + normals_command = '' + + } else if(run_mito){ + panels_command = "-L ${interval_label} --mitochondria-mode" + normals_command = '' } else { - panelsCommand = " --germline-resource $germline_resource --panel-of-normals $panel_of_normals --f1r2-tar-gz ${prefix}.f1r2.tar.gz" - which_norm.each() {a -> normalsList.add(" -normal " + a ) } - normalsCommand = normalsList.join( ' ') + panels_command = " --germline-resource $germline_resource --panel-of-normals $panel_of_normals --f1r2-tar-gz ${prefix}.f1r2.tar.gz" + normals_command = '-normal ' + which_norm.join( ' -normal ') } """ gatk Mutect2 \\ -R ${fasta} \\ - ${inputsCommand} \\ - ${normalsCommand} \\ - ${panelsCommand} \\ + ${inputs_command} \\ + ${normals_command} \\ + ${panels_command} \\ -O ${prefix}.vcf.gz \\ $options.args diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 4c38a049..44601e41 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -22,23 +22,34 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test'] - - bam: + - input: type: list - description: list of BAM files - pattern: "*.bam" - - bai: + description: list of BAM files, also able to take CRAM as an input + pattern: "*.{bam/cram}" + - input_index: type: list - description: list of BAM file indexes - pattern: "*.bam.bai" + description: list of BAM file indexes, also able to take CRAM indexes as an input + pattern: "*.{bam.bai/cram.crai}" - which_norm: type: list description: optional list of sample headers contained in the normal sample bam files (these are required for tumor_normal_pair mode) + pattern: "testN" - run_single: type: boolean description: Specify whether or not to run in tumor_single mode instead of tumor_normal_pair mode (will be ignored if run_pon is also true) + pattern: "true/false" - run_pon: type: boolean description: Specify whether or not to run in panel_of_normal mode instead of tumor_normal_pair mode + pattern: "true/false" + - run_mito: + type: boolean + description: Specify whether or not to run in mitochondria-mode instead of tumor_normal_pair mode + pattern: "true/false" + - interval_label: + type: string + description: Specify the label used for mitochondrial chromosome when mutect2 is run in mitochondria mode. 
+ pattern: "chrM" - fasta: type: file description: The reference fasta file diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index 072b3125..293739e4 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -3,6 +3,8 @@ nextflow.enable.dsl = 2 include { GATK4_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' addParams( options: [:] ) +// used to run with the mitochondria mode setting as this increases sensitivity, allowing for some tumor_normal variants to be detected while the old test data is still in use, will be removed when new test data for sarek is available. +include { GATK4_MUTECT2 as GATK4_TEMPFIX_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' addParams( options: [args: '--mitochondria-mode'] ) workflow test_gatk4_mutect2_tumor_normal_pair { input = [ [ id:'test'], // meta map @@ -12,6 +14,8 @@ workflow test_gatk4_mutect2_tumor_normal_pair { ] run_single = false run_pon = false + run_mito = false + interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) @@ -20,7 +24,7 @@ workflow test_gatk4_mutect2_tumor_normal_pair { panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_TEMPFIX_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) } workflow test_gatk4_mutect2_tumor_single { @@ -31,6 +35,8 @@ workflow test_gatk4_mutect2_tumor_single { ] run_single = true run_pon = false + run_mito = false + interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) @@ -39,7 +45,28 @@ workflow test_gatk4_mutect2_tumor_single { panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) +} + +workflow test_gatk4_mutect2_cram_input { + input = [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)], + [] + ] + run_single = true + run_pon = false + run_mito = false + interval_label = [] 
+ fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + germline_resource_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) + panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + + GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) } workflow test_gatk4_mutect2_generate_pon { @@ -50,6 +77,8 @@ workflow test_gatk4_mutect2_generate_pon { ] run_single = false run_pon = true + run_mito = false + interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) @@ -58,5 +87,27 @@ workflow test_gatk4_mutect2_generate_pon { panel_of_normals = [] panel_of_normals_idx = [] - GATK4_MUTECT2 ( input , run_single , run_pon , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) +} + +// mitochondria mode would ideally have some mitochondria test data, but since the mitochondria settings only increase detection sensitivity, we can use the chr22 data as a stand in as it is already a small dataset, the extra variants detected compared to generate_pon shows the mode is working. 
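// Illustration only, assuming the variable names used in modules/gatk4/mutect2/main.nf: the
// run_mito branch exercised by the workflow below reduces to adding the interval label and the
// mitochondria flag to the Mutect2 call, roughly
//   panels_command  = "-L ${interval_label} --mitochondria-mode"
//   normals_command = ''
// i.e. gatk Mutect2 -R genome.fasta -I test.bam -L chr22 --mitochondria-mode -O test.vcf.gz
// (file names here are placeholders, and chr22 stands in for a true mitochondrial contig such as chrM).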
+workflow test_gatk4_mutect2_mitochondria { + input = [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] + ] + run_single = false + run_pon = false + run_mito = true + interval_label = 'chr22' + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = [] + germline_resource_idx = [] + panel_of_normals = [] + panel_of_normals_idx = [] + + GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) } diff --git a/tests/modules/gatk4/mutect2/test.yml b/tests/modules/gatk4/mutect2/test.yml index 16f39875..031ed072 100644 --- a/tests/modules/gatk4/mutect2/test.yml +++ b/tests/modules/gatk4/mutect2/test.yml @@ -7,7 +7,7 @@ - path: output/gatk4/test.f1r2.tar.gz - path: output/gatk4/test.vcf.gz - path: output/gatk4/test.vcf.gz.stats - md5sum: 6ecb874e6a95aa48233587b876c2a7a9 + md5sum: 887d54e393510f1d0aa2c33bc6155161 - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_tumor_single @@ -18,7 +18,18 @@ files: - path: output/gatk4/test.vcf.gz - path: output/gatk4/test.vcf.gz.stats - md5sum: e7ef613f7d158b8a0adf44abe5db2029 + md5sum: 106c5828b02b906c97922618b6072169 + - path: output/gatk4/test.vcf.gz.tbi + +- name: gatk4 mutect2 test_gatk4_mutect2_cram_input + command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_cram_input -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/mutect2 + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.stats + md5sum: 106c5828b02b906c97922618b6072169 - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_generate_pon @@ -31,3 +42,14 @@ - path: output/gatk4/test.vcf.gz.stats md5sum: 4f77301a125913170b8e9e7828b4ca3f - path: output/gatk4/test.vcf.gz.tbi + +- name: gatk4 mutect2 test_gatk4_mutect2_mitochondria + command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_mitochondria -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/mutect2 + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.stats + md5sum: fc6ea14ca2da346babe78161beea28c9 + - path: output/gatk4/test.vcf.gz.tbi From ac1e6df076195cec553a2079c9cebd94026a0d47 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 29 Oct 2021 13:01:05 +0200 Subject: [PATCH 184/314] Update to allow cram + update needed to use the gatk4 modules in sarek (#976) * Make samtools/merge cram compliant * samtools/stats cram compliance * update yml file * samtools/view to deal with crams * Update tests to make sure cram works * also fix tmp dir and min mem in one go * basequalityrecal test for cram + min mem + tmpdir * update haplotypecaller for sarek * update haplotype yml * update markdup to allow multiple bams, take out params to be passed with options.args * remove TODO statement * Remove variable md5sum * add emtpy input to stats module in subworkflows * subworkflows seem to work now on my side * Apply code review Co-authored-by: Maxime U. 
Garcia * replace bam with input to be more inclusive * rename everywhere * rename input * remove variable checksum Co-authored-by: Maxime U. Garcia --- modules/gatk4/applybqsr/main.nf | 10 +++++-- modules/gatk4/applybqsr/meta.yml | 10 +++++-- modules/gatk4/baserecalibrator/main.nf | 11 +++++-- modules/gatk4/baserecalibrator/meta.yml | 11 +++++-- modules/gatk4/haplotypecaller/main.nf | 18 +++++++---- modules/gatk4/haplotypecaller/meta.yml | 23 ++++++++++---- modules/gatk4/markduplicates/main.nf | 13 ++++++-- modules/gatk4/markduplicates/meta.yml | 1 + modules/manta/germline/main.nf | 4 +-- modules/manta/germline/meta.yml | 4 +-- modules/manta/somatic/main.nf | 6 ++-- modules/manta/somatic/meta.yml | 8 ++--- modules/manta/tumoronly/main.nf | 4 +-- modules/manta/tumoronly/meta.yml | 5 ++-- modules/samtools/merge/main.nf | 12 +++++--- modules/samtools/merge/meta.yml | 17 ++++++++--- modules/samtools/stats/main.nf | 6 ++-- modules/samtools/stats/meta.yml | 21 ++++++++----- modules/samtools/view/main.nf | 12 +++++--- modules/samtools/view/meta.yml | 15 ++++++++-- modules/strelka/germline/main.nf | 4 +-- modules/strelka/germline/meta.yml | 12 ++++---- modules/strelka/somatic/main.nf | 6 ++-- modules/strelka/somatic/meta.yml | 8 ++--- .../nf-core/bam_stats_samtools/main.nf | 2 +- tests/modules/gatk4/applybqsr/main.nf | 14 +++++++++ tests/modules/gatk4/applybqsr/test.yml | 17 ++++++++--- tests/modules/gatk4/baserecalibrator/main.nf | 15 ++++++++++ tests/modules/gatk4/baserecalibrator/test.yml | 15 ++++++++-- tests/modules/gatk4/haplotypecaller/main.nf | 30 ++++++++++++++++++- tests/modules/gatk4/haplotypecaller/test.yml | 25 ++++++++++++---- tests/modules/gatk4/markduplicates/main.nf | 9 ++++++ tests/modules/gatk4/markduplicates/test.yml | 19 ++++++++++-- tests/modules/samtools/merge/main.nf | 12 +++++++- tests/modules/samtools/merge/test.yml | 14 +++++++-- tests/modules/samtools/stats/main.nf | 12 +++++++- tests/modules/samtools/stats/test.yml | 15 ++++++++-- tests/modules/samtools/view/main.nf | 13 ++++++-- tests/modules/samtools/view/test.yml | 12 ++++++-- 39 files changed, 356 insertions(+), 109 deletions(-) diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index e804bcff..508a29ca 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -19,7 +19,7 @@ process GATK4_APPLYBQSR { } input: - tuple val(meta), path(bam), path(bai), path(bqsr_table) + tuple val(meta), path(input), path(input_index), path(bqsr_table) path fasta path fastaidx path dict @@ -32,12 +32,18 @@ process GATK4_APPLYBQSR { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" + if (!task.memory) { + log.info '[GATK ApplyBQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ gatk ApplyBQSR \\ -R $fasta \\ - -I $bam \\ + -I $input \\ --bqsr-recal-file $bqsr_table \\ $interval \\ + --tmp-dir . \\ -O ${prefix}.bam \\ $options.args diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index e09e8c52..b002dca6 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -20,10 +20,14 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - bam: + - input: type: file - description: BAM file from alignment - pattern: "*.{bam}" + description: BAM/CRAM file from alignment + pattern: "*.{bam,cram}" + - input_index: + type: file + description: BAI/CRAI file from alignment + pattern: "*.{bai,crai}" - bqsr_table: type: file description: Recalibration table from gatk4_baserecalibrator diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 6033fbf1..85c30daf 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -19,7 +19,7 @@ process GATK4_BASERECALIBRATOR { } input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(input), path(input_index) path fasta path fastaidx path dict @@ -35,12 +35,19 @@ process GATK4_BASERECALIBRATOR { def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def intervalsCommand = intervalsBed ? "-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') + + if (!task.memory) { + log.info '[GATK BaseRecalibrator] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ gatk BaseRecalibrator \ -R $fasta \ - -I $bam \ + -I $input \ $sitesCommand \ $intervalsCommand \ + --tmp-dir . \ $options.args \ -O ${prefix}.table diff --git a/modules/gatk4/baserecalibrator/meta.yml b/modules/gatk4/baserecalibrator/meta.yml index d579d9e5..7fd273e1 100644 --- a/modules/gatk4/baserecalibrator/meta.yml +++ b/modules/gatk4/baserecalibrator/meta.yml @@ -20,10 +20,14 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - bam: + - input: type: file - description: BAM file from alignment - pattern: "*.{bam}" + description: BAM/CRAM file from alignment + pattern: "*.{bam,cram}" + - input_index: + type: file + description: BAI/CRAI file from alignment + pattern: "*.{bai,crai}" - fasta: type: file description: The reference fasta file @@ -57,3 +61,4 @@ output: authors: - "@yocra3" + - "@FriederikeHanssen" diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 01b71ccb..4bddbb6d 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -19,10 +19,13 @@ process GATK4_HAPLOTYPECALLER { } input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(input), path(input_index) path fasta path fai path dict + path dbsnp + path dbsnp_tbi + path interval output: tuple val(meta), path("*.vcf.gz"), emit: vcf @@ -30,8 +33,10 @@ process GATK4_HAPLOTYPECALLER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def avail_mem = 3 + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def interval_option = interval ? "-L ${interval}" : "" + def dbsnp_option = dbsnp ? "-D ${dbsnp}" : "" + def avail_mem = 3 if (!task.memory) { log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { @@ -42,9 +47,12 @@ process GATK4_HAPLOTYPECALLER { --java-options "-Xmx${avail_mem}g" \\ HaplotypeCaller \\ -R $fasta \\ - -I $bam \\ + -I $input \\ + ${dbsnp_option} \\ + ${interval_option} \\ -O ${prefix}.vcf.gz \\ - $options.args + $options.args \\ + --tmp-dir . 
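    # Illustration only: with the optional dbsnp and interval inputs supplied, the call above
    # expands to roughly the following (placeholder file names, default 3 GB heap when
    # task.memory is unset):
    #   gatk --java-options "-Xmx3g" HaplotypeCaller -R genome.fasta -I sample.cram \
    #       -D dbsnp.vcf.gz -L regions.bed -O sample.vcf.gz --tmp-dir .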
cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/modules/gatk4/haplotypecaller/meta.yml b/modules/gatk4/haplotypecaller/meta.yml index 6a1bd7ed..6c9d0891 100644 --- a/modules/gatk4/haplotypecaller/meta.yml +++ b/modules/gatk4/haplotypecaller/meta.yml @@ -21,14 +21,14 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - bam: + - input: type: file - description: BAM file - pattern: "*.bam" - - bai: + description: BAM/CRAM file from alignment + pattern: "*.{bam,cram}" + - input_index: type: file - description: Index of BAM file - pattern: "*.bam.bai" + description: BAI/CRAI file from alignment + pattern: "*.{bai,crai}" - fasta: type: file description: The reference fasta file @@ -41,6 +41,16 @@ input: type: file description: GATK sequence dictionary pattern: "*.dict" + - dbsnp: + type: file + description: VCF file containing known sites (optional) + - dbsnp_tbi: + type: file + description: VCF index of dbsnp (optional) + - interval: + type: file + description: Bed file with the genomic regions included in the library (optional) + output: - meta: type: map @@ -62,3 +72,4 @@ output: authors: - "@suzannejin" + - "@FriederikeHanssen" diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index 8f94f4dd..b1ff5222 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -19,21 +19,28 @@ process GATK4_MARKDUPLICATES { } input: - tuple val(meta), path(bam) + tuple val(meta), path(bams) output: tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*.bai") , emit: bai tuple val(meta), path("*.metrics"), emit: metrics path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ gatk MarkDuplicates \\ - --INPUT $bam \\ + $bam_list \\ --METRICS_FILE ${prefix}.metrics \\ --TMP_DIR . \\ - --ASSUME_SORT_ORDER coordinate \\ --CREATE_INDEX true \\ --OUTPUT ${prefix}.bam \\ $options.args diff --git a/modules/gatk4/markduplicates/meta.yml b/modules/gatk4/markduplicates/meta.yml index 59aaad4d..5777067a 100644 --- a/modules/gatk4/markduplicates/meta.yml +++ b/modules/gatk4/markduplicates/meta.yml @@ -47,3 +47,4 @@ output: authors: - "@ajodeh-juma" + - "@FriederikeHanssen" diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index ca2ac9dc..f957a7ec 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -19,7 +19,7 @@ process MANTA_GERMLINE { } input: - tuple val(meta), path(cram), path(crai) + tuple val(meta), path(input), path(input_index) path fasta path fai path target_bed @@ -39,7 +39,7 @@ process MANTA_GERMLINE { def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ - --bam $cram \ + --bam $input \ --reference $fasta \ $options_manta \ --runDir manta diff --git a/modules/manta/germline/meta.yml b/modules/manta/germline/meta.yml index 7933fd6c..3bdb8264 100644 --- a/modules/manta/germline/meta.yml +++ b/modules/manta/germline/meta.yml @@ -23,11 +23,11 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - cram: + - input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - crai: + - input_index: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" diff --git a/modules/manta/somatic/main.nf b/modules/manta/somatic/main.nf index 16a30f17..f912d478 100644 --- a/modules/manta/somatic/main.nf +++ b/modules/manta/somatic/main.nf @@ -19,7 +19,7 @@ process MANTA_SOMATIC { } input: - tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor) + tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor) path fasta path fai path target_bed @@ -42,8 +42,8 @@ process MANTA_SOMATIC { """ configManta.py \ - --tumorBam $cram_tumor \ - --normalBam $cram_normal \ + --tumorBam $input_tumor \ + --normalBam $input_normal \ --reference $fasta \ $options_manta \ --runDir manta diff --git a/modules/manta/somatic/meta.yml b/modules/manta/somatic/meta.yml index 08103ba7..ddd0eafe 100644 --- a/modules/manta/somatic/meta.yml +++ b/modules/manta/somatic/meta.yml @@ -23,19 +23,19 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - cram_normal: + - input_normal: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - crai_normal: + - input_index_normal: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" - - cram_tumor: + - input_tumor: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - crai_tumor: + - input_index_tumor: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" diff --git a/modules/manta/tumoronly/main.nf b/modules/manta/tumoronly/main.nf index a86279df..f20e8128 100644 --- a/modules/manta/tumoronly/main.nf +++ b/modules/manta/tumoronly/main.nf @@ -19,7 +19,7 @@ process MANTA_TUMORONLY { } input: - tuple val(meta), path(cram), path(crai) + tuple val(meta), path(input), path(input_index) path fasta path fai path target_bed @@ -39,7 +39,7 @@ process MANTA_TUMORONLY { def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ - --tumorBam $cram \ + --tumorBam $input \ --reference $fasta \ $options_manta \ --runDir manta diff --git a/modules/manta/tumoronly/meta.yml b/modules/manta/tumoronly/meta.yml index d4af9402..86d1c6c0 100644 --- a/modules/manta/tumoronly/meta.yml +++ b/modules/manta/tumoronly/meta.yml @@ -23,11 +23,11 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - cram: + - input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - crai: + - input_index: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" @@ -54,7 +54,6 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - candidate_small_indels_vcf: type: file description: Gzipped VCF file containing variants diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 34c40d57..fefb423b 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -19,16 +19,20 @@ process SAMTOOLS_MERGE { } input: - tuple val(meta), path(bams) + tuple val(meta), path(input_files) + path fasta output: - tuple val(meta), path("${prefix}.bam"), emit: bam - path "versions.yml" , emit: versions + tuple val(meta), path("${prefix}.bam"), optional:true, emit: bam + tuple val(meta), path("${prefix}.cram"), optional:true, emit: cram + path "versions.yml" , emit: versions script: prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def file_type = input_files[0].getExtension() + def reference = fasta ? "--reference ${fasta}" : "" """ - samtools merge ${prefix}.bam $bams + samtools merge ${reference} ${prefix}.${file_type} $input_files cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/merge/meta.yml b/modules/samtools/merge/meta.yml index 78b75b36..2576a3a3 100644 --- a/modules/samtools/merge/meta.yml +++ b/modules/samtools/merge/meta.yml @@ -1,5 +1,5 @@ name: samtools_merge -description: Merge BAM file +description: Merge BAM or CRAM file keywords: - merge - bam @@ -21,20 +21,28 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - bam: + - input_files: type: file - description: BAM file + description: BAM/CRAM file pattern: "*.{bam,cram,sam}" + - fasta: + type: optional file + description: Reference file the CRAM was created with + pattern: "*.{fasta,fa}" output: - meta: type: map description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - merged_bam: + - bam: type: file description: BAM file pattern: "*.{bam}" + - cram: + type: file + description: CRAM file + pattern: "*.{cram}" - versions: type: file description: File containing software versions @@ -43,3 +51,4 @@ authors: - "@drpatelh" - "@yuukiiwa " - "@maxulysse" + - "@FriederikeHanssen" diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index 6218dd2d..aab43410 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -19,15 +19,17 @@ process SAMTOOLS_STATS { } input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(input), path(input_index) + path fasta output: tuple val(meta), path("*.stats"), emit: stats path "versions.yml" , emit: versions script: + def reference = fasta ? "--reference ${fasta}" : "" """ - samtools stats $bam > ${bam}.stats + samtools stats ${reference} ${input} > ${input}.stats cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/stats/meta.yml b/modules/samtools/stats/meta.yml index ae41498a..869e62e3 100644 --- a/modules/samtools/stats/meta.yml +++ b/modules/samtools/stats/meta.yml @@ -22,14 +22,18 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - bam: - type: file - description: BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" - - bai: - type: file - description: Index for BAM/CRAM/SAM file - pattern: "*.{bai,crai,sai}" + - input: + type: file + description: BAM/CRAM file from alignment + pattern: "*.{bam,cram}" + - input_index: + type: file + description: BAI/CRAI file from alignment + pattern: "*.{bai,crai}" + - fasta: + type: optional file + description: Reference file the CRAM was created with + pattern: "*.{fasta,fa}" output: - meta: type: map @@ -46,3 +50,4 @@ output: pattern: "versions.yml" authors: - "@drpatelh" + - "@FriederikeHanssen" diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index ec1663e0..b7a047ee 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -19,16 +19,20 @@ process SAMTOOLS_VIEW { } input: - tuple val(meta), path(bam) + tuple val(meta), path(input) + path fasta output: - tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam") , optional: true, emit: bam + tuple val(meta), path("*.cram"), optional: true, emit: cram + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def reference = fasta ? "--reference ${fasta} -C" : "" + def file_type = input.getExtension() """ - samtools view $options.args $bam > ${prefix}.bam + samtools view ${reference} $options.args $input > ${prefix}.${file_type} cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/view/meta.yml b/modules/samtools/view/meta.yml index 29d1ecc1..8abf34af 100644 --- a/modules/samtools/view/meta.yml +++ b/modules/samtools/view/meta.yml @@ -21,10 +21,14 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - bam: + - input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" + - fasta: + type: optional file + description: Reference file the CRAM was created with + pattern: "*.{fasta,fa}" output: - meta: type: map @@ -33,8 +37,12 @@ output: e.g. [ id:'test', single_end:false ] - bam: type: file - description: filtered/converted BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" + description: filtered/converted BAM/SAM file + pattern: "*.{bam,sam}" + - cram: + type: file + description: filtered/converted CRAM file + pattern: "*.cram" - versions: type: file description: File containing software versions @@ -42,3 +50,4 @@ output: authors: - "@drpatelh" - "@joseespinosa" + - "@FriederikeHanssen" diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index 0d201940..5e913c40 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -19,7 +19,7 @@ process STRELKA_GERMLINE { } input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(input), path(input_index) path fasta path fai path target_bed @@ -38,7 +38,7 @@ process STRELKA_GERMLINE { def regions = target_bed ? 
"--exome --callRegions ${target_bed}" : "" """ configureStrelkaGermlineWorkflow.py \\ - --bam $bam \\ + --bam $input \\ --referenceFasta $fasta \\ $regions \\ $options.args \\ diff --git a/modules/strelka/germline/meta.yml b/modules/strelka/germline/meta.yml index 3f86b045..2eeb0f8f 100644 --- a/modules/strelka/germline/meta.yml +++ b/modules/strelka/germline/meta.yml @@ -21,14 +21,14 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test'] - - bam: + - input: type: file - description: BAM file - pattern: "*.{bam}" - - bai: + description: BAM/CRAM file + pattern: "*.{bam,cram}" + - input_index: type: file - description: BAM index file - pattern: "*.{bai}" + description: BAM/CRAI index file + pattern: "*.{bai,crai}" - target_bed: type: file description: An optional bed file diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf index 02bd5822..633b0a2c 100644 --- a/modules/strelka/somatic/main.nf +++ b/modules/strelka/somatic/main.nf @@ -19,7 +19,7 @@ process STRELKA_SOMATIC { } input: - tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor), path(manta_candidate_small_indels), path(manta_candidate_small_indels_tbi) + tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor), path(manta_candidate_small_indels), path(manta_candidate_small_indels_tbi) path fasta path fai path target_bed @@ -38,8 +38,8 @@ process STRELKA_SOMATIC { def options_manta = manta_candidate_small_indels ? "--indelCandidates ${manta_candidate_small_indels}" : "" """ configureStrelkaSomaticWorkflow.py \\ - --tumor $cram_tumor \\ - --normal $cram_normal \\ + --tumor $input_tumor \\ + --normal $input_normal \\ --referenceFasta $fasta \\ $options_target_bed \\ $options_manta \\ diff --git a/modules/strelka/somatic/meta.yml b/modules/strelka/somatic/meta.yml index ce5acb33..076c1036 100644 --- a/modules/strelka/somatic/meta.yml +++ b/modules/strelka/somatic/meta.yml @@ -21,19 +21,19 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - cram_normal: + - input_normal: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - crai_normal: + - input_index_normal: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" - - cram_tumor: + - input_tumor: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - crai_tumor: + - input_index_tumor: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" diff --git a/subworkflows/nf-core/bam_stats_samtools/main.nf b/subworkflows/nf-core/bam_stats_samtools/main.nf index 9276232c..463ec99d 100644 --- a/subworkflows/nf-core/bam_stats_samtools/main.nf +++ b/subworkflows/nf-core/bam_stats_samtools/main.nf @@ -15,7 +15,7 @@ workflow BAM_STATS_SAMTOOLS { main: ch_versions = Channel.empty() - SAMTOOLS_STATS ( ch_bam_bai ) + SAMTOOLS_STATS ( ch_bam_bai, [] ) ch_versions = ch_versions.mix(SAMTOOLS_STATS.out.versions.first()) SAMTOOLS_FLAGSTAT ( ch_bam_bai ) diff --git a/tests/modules/gatk4/applybqsr/main.nf b/tests/modules/gatk4/applybqsr/main.nf index 5fb590b0..80b51015 100644 --- a/tests/modules/gatk4/applybqsr/main.nf +++ b/tests/modules/gatk4/applybqsr/main.nf @@ -30,3 +30,17 @@ workflow test_gatk4_applybqsr_intervals { GATK4_APPLYBQSR ( input, fasta, fai, dict, intervals ) } + +workflow test_gatk4_applybqsr_cram { + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + intervals = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + GATK4_APPLYBQSR ( input, fasta, fai, dict, intervals ) +} diff --git a/tests/modules/gatk4/applybqsr/test.yml b/tests/modules/gatk4/applybqsr/test.yml index 983cc09a..ed89c6ff 100644 --- a/tests/modules/gatk4/applybqsr/test.yml +++ b/tests/modules/gatk4/applybqsr/test.yml @@ -1,17 +1,26 @@ - name: gatk4 applybqsr test_gatk4_applybqsr command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr -c tests/config/nextflow.config tags: - - gatk4 - gatk4/applybqsr + - gatk4 files: - path: output/gatk4/test.bam - md5sum: dac716c394db5e83c12b44355c098ca7 + md5sum: 87a2eabae2b7b41574f966612b5addae - name: gatk4 applybqsr test_gatk4_applybqsr_intervals command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c tests/config/nextflow.config tags: - - gatk4 - gatk4/applybqsr + - gatk4 files: - path: output/gatk4/test.bam - md5sum: 400441dbe5344658580ba0a24ba57069 + md5sum: 9c015d3c1dbd9eee793b7386f432b6aa + +- name: gatk4 applybqsr test_gatk4_applybqsr_cram + command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c tests/config/nextflow.config + tags: + - gatk4/applybqsr + - gatk4 + files: + - path: output/gatk4/test.bam + md5sum: 02f84815fdbc99c21c8d42ebdcabbbf7 diff --git a/tests/modules/gatk4/baserecalibrator/main.nf b/tests/modules/gatk4/baserecalibrator/main.nf index 671a1d67..a50c09e3 100644 --- a/tests/modules/gatk4/baserecalibrator/main.nf +++ 
b/tests/modules/gatk4/baserecalibrator/main.nf @@ -18,6 +18,21 @@ workflow test_gatk4_baserecalibrator { GATK4_BASERECALIBRATOR ( input, fasta, fai, dict, [], sites, sites_tbi ) } +workflow test_gatk4_baserecalibrator_cram { + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + sites = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + sites_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + GATK4_BASERECALIBRATOR ( input, fasta, fai, dict, [], sites, sites_tbi ) +} + workflow test_gatk4_baserecalibrator_intervals { input = [ [ id:'test' ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), diff --git a/tests/modules/gatk4/baserecalibrator/test.yml b/tests/modules/gatk4/baserecalibrator/test.yml index 3c30d78f..a15c9ee3 100644 --- a/tests/modules/gatk4/baserecalibrator/test.yml +++ b/tests/modules/gatk4/baserecalibrator/test.yml @@ -1,17 +1,26 @@ - name: gatk4 baserecalibrator test_gatk4_baserecalibrator command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator -c tests/config/nextflow.config tags: - - gatk4/baserecalibrator - gatk4 + - gatk4/baserecalibrator files: - path: output/gatk4/test.table md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 +- name: gatk4 baserecalibrator test_gatk4_baserecalibrator_cram + command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_cram -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/baserecalibrator + files: + - path: output/gatk4/test.table + md5sum: 35d89a3811aa31711fc9815b6b80e6ec + - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_intervals command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_intervals -c tests/config/nextflow.config tags: - - gatk4/baserecalibrator - gatk4 + - gatk4/baserecalibrator files: - path: output/gatk4/test.table md5sum: 9ecb5f00a2229291705addc09c0ec231 @@ -19,8 +28,8 @@ - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_multiple_sites command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_multiple_sites -c tests/config/nextflow.config tags: - - gatk4/baserecalibrator - gatk4 + - gatk4/baserecalibrator files: - path: output/gatk4/test.table md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 diff --git a/tests/modules/gatk4/haplotypecaller/main.nf b/tests/modules/gatk4/haplotypecaller/main.nf index 76059074..fd5f30fa 100644 --- a/tests/modules/gatk4/haplotypecaller/main.nf +++ b/tests/modules/gatk4/haplotypecaller/main.nf @@ -13,5 +13,33 @@ workflow test_gatk4_haplotypecaller { fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) - GATK4_HAPLOTYPECALLER ( input, fasta, fai, dict ) + 
GATK4_HAPLOTYPECALLER ( input, fasta, fai, dict, [], [], [] ) +} + +workflow test_gatk4_haplotypecaller_cram { + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_HAPLOTYPECALLER ( input, fasta, fai, dict, [], [], [] ) +} + +workflow test_gatk4_haplotypecaller_intervals_dbsnp { + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + sites = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + sites_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + intervals = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + GATK4_HAPLOTYPECALLER ( input, fasta, fai, dict, sites, sites_tbi, intervals ) } diff --git a/tests/modules/gatk4/haplotypecaller/test.yml b/tests/modules/gatk4/haplotypecaller/test.yml index dee2a2ab..480ff8f0 100644 --- a/tests/modules/gatk4/haplotypecaller/test.yml +++ b/tests/modules/gatk4/haplotypecaller/test.yml @@ -1,13 +1,26 @@ - name: gatk4 haplotypecaller test_gatk4_haplotypecaller command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller -c tests/config/nextflow.config tags: - - gatk4 - gatk4/haplotypecaller + - gatk4 + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.tbi + +- name: gatk4 haplotypecaller test_gatk4_haplotypecaller_cram + command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_cram -c tests/config/nextflow.config + tags: + - gatk4/haplotypecaller + - gatk4 + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.tbi + +- name: gatk4 haplotypecaller test_gatk4_haplotypecaller_intervals_dbsnp + command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_intervals_dbsnp -c tests/config/nextflow.config + tags: + - gatk4/haplotypecaller + - gatk4 files: - path: output/gatk4/test.vcf.gz - should_exist: true - contains: - - 'MT192765.1' - - '54.60' - - '37.32' - path: output/gatk4/test.vcf.gz.tbi diff --git a/tests/modules/gatk4/markduplicates/main.nf b/tests/modules/gatk4/markduplicates/main.nf index 06425088..b9709dc0 100644 --- a/tests/modules/gatk4/markduplicates/main.nf +++ b/tests/modules/gatk4/markduplicates/main.nf @@ -11,3 +11,12 @@ workflow test_gatk4_markduplicates { GATK4_MARKDUPLICATES ( input ) } + +workflow test_gatk4_markduplicates_multiple_bams { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], 
checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + ] + + GATK4_MARKDUPLICATES ( input ) +} diff --git a/tests/modules/gatk4/markduplicates/test.yml b/tests/modules/gatk4/markduplicates/test.yml index 028147e6..99296ca4 100644 --- a/tests/modules/gatk4/markduplicates/test.yml +++ b/tests/modules/gatk4/markduplicates/test.yml @@ -1,8 +1,23 @@ - name: gatk4 markduplicates test_gatk4_markduplicates command: nextflow run tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates -c tests/config/nextflow.config tags: - - gatk4 - gatk4/markduplicates + - gatk4 files: + - path: output/gatk4/test.bai + md5sum: e9c125e82553209933883b4fe2b8d7c2 - path: output/gatk4/test.bam - md5sum: 3b6facab3afbacfa08a7a975efbd2c6b + md5sum: bda9a7bf5057f2288ed70be3eb8a753f + - path: output/gatk4/test.metrics + +- name: gatk4 markduplicates test_gatk4_markduplicates_multiple_bams + command: nextflow run tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates_multiple_bams -c tests/config/nextflow.config + tags: + - gatk4/markduplicates + - gatk4 + files: + - path: output/gatk4/test.bai + md5sum: 93cebe29e7cca2064262b739235cca9b + - path: output/gatk4/test.bam + md5sum: dcd6f584006b04141fb787001a8ecacc + - path: output/gatk4/test.metrics diff --git a/tests/modules/samtools/merge/main.nf b/tests/modules/samtools/merge/main.nf index a4511a34..07485df1 100644 --- a/tests/modules/samtools/merge/main.nf +++ b/tests/modules/samtools/merge/main.nf @@ -11,5 +11,15 @@ workflow test_samtools_merge { file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true)] ] - SAMTOOLS_MERGE ( input ) + SAMTOOLS_MERGE ( input, [] ) +} + +workflow test_samtools_merge_cram { + input = [ [ id: 'test' ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + SAMTOOLS_MERGE ( input, fasta ) } diff --git a/tests/modules/samtools/merge/test.yml b/tests/modules/samtools/merge/test.yml index d0674ca4..b39ca2ec 100644 --- a/tests/modules/samtools/merge/test.yml +++ b/tests/modules/samtools/merge/test.yml @@ -1,7 +1,15 @@ -- name: samtools merge - command: nextflow run ./tests/modules/samtools/merge -entry test_samtools_merge -c tests/config/nextflow.config +- name: samtools merge test_samtools_merge + command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge -c tests/config/nextflow.config tags: - - samtools - samtools/merge + - samtools files: - path: output/samtools/test_merged.bam + +- name: samtools merge test_samtools_merge_cram + command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge_cram -c tests/config/nextflow.config + tags: + - samtools/merge + - samtools + files: + - path: output/samtools/test_merged.cram diff --git a/tests/modules/samtools/stats/main.nf b/tests/modules/samtools/stats/main.nf index 04a689fe..8e8b0c88 100644 --- a/tests/modules/samtools/stats/main.nf +++ b/tests/modules/samtools/stats/main.nf @@ -10,5 +10,15 @@ workflow test_samtools_stats { file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] - SAMTOOLS_STATS ( input ) + SAMTOOLS_STATS ( input, []) +} + +workflow test_samtools_stats_cram { + input 
= [ [ id: 'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + SAMTOOLS_STATS ( input, fasta ) } diff --git a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index cf44b846..a194c666 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -1,8 +1,17 @@ -- name: samtools stats - command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats -c tests/config/nextflow.config +- name: samtools stats test_samtools_stats + command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats -c tests/config/nextflow.config tags: - samtools - samtools/stats files: - - path: ./output/samtools/test.paired_end.sorted.bam.stats + - path: output/samtools/test.paired_end.sorted.bam.stats md5sum: a7f36cf11fd3bf97e0a0ae29c0627296 + +- name: samtools stats test_samtools_stats_cram + command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats_cram -c tests/config/nextflow.config + tags: + - samtools + - samtools/stats + files: + - path: output/samtools/test.paired_end.recalibrated.sorted.cram.stats + md5sum: bd55a1da30028403f4b66dacf7a2a20e diff --git a/tests/modules/samtools/view/main.nf b/tests/modules/samtools/view/main.nf index c60acb73..bd270cd8 100644 --- a/tests/modules/samtools/view/main.nf +++ b/tests/modules/samtools/view/main.nf @@ -7,8 +7,17 @@ include { SAMTOOLS_VIEW } from '../../../../modules/samtools/view/main.nf' addPa workflow test_samtools_view { input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) - ] - SAMTOOLS_VIEW ( input ) + SAMTOOLS_VIEW ( input, [] ) +} + +workflow test_samtools_view_cram { + input = [ [ id: 'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + SAMTOOLS_VIEW ( input, fasta ) } diff --git a/tests/modules/samtools/view/test.yml b/tests/modules/samtools/view/test.yml index 383dfa87..ceaa0e89 100644 --- a/tests/modules/samtools/view/test.yml +++ b/tests/modules/samtools/view/test.yml @@ -1,8 +1,16 @@ -- name: samtools view +- name: samtools view test_samtools_view command: nextflow run tests/modules/samtools/view -entry test_samtools_view -c tests/config/nextflow.config tags: - - samtools - samtools/view + - samtools files: - path: output/samtools/test.bam md5sum: 8fb1e82f76416e9e30fc6b2357e2cf13 + +- name: samtools view test_samtools_view_cram + command: nextflow run tests/modules/samtools/view -entry test_samtools_view_cram -c tests/config/nextflow.config + tags: + - samtools/view + - samtools + files: + - path: output/samtools/test.cram From 84cb78cc98ba7fe5c9de7227cf9824b13624ce88 Mon Sep 17 00:00:00 2001 From: Daniel Lundin Date: Fri, 29 Oct 2021 13:23:34 +0200 Subject: [PATCH 185/314] Khmer normalizebymedian (#985) * Templates for new module * pe only test passing * only_pe and only_se passing * only_pe, only_se, mixed passes * Multiple pe + 
se tc passes * Passing args works * Add 'interleaved' to description * Fixed linting message * Update modules/khmer/normalizebymedian/main.nf Good point. Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Update meta.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/khmer/normalizebymedian/functions.nf | 78 +++++++++++++++++ modules/khmer/normalizebymedian/main.nf | 49 +++++++++++ modules/khmer/normalizebymedian/meta.yml | 39 +++++++++ tests/config/pytest_modules.yml | 4 + tests/modules/khmer/normalizebymedian/main.nf | 85 +++++++++++++++++++ .../modules/khmer/normalizebymedian/test.yml | 42 +++++++++ 6 files changed, 297 insertions(+) create mode 100644 modules/khmer/normalizebymedian/functions.nf create mode 100644 modules/khmer/normalizebymedian/main.nf create mode 100644 modules/khmer/normalizebymedian/meta.yml create mode 100644 tests/modules/khmer/normalizebymedian/main.nf create mode 100644 tests/modules/khmer/normalizebymedian/test.yml diff --git a/modules/khmer/normalizebymedian/functions.nf b/modules/khmer/normalizebymedian/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/khmer/normalizebymedian/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/khmer/normalizebymedian/main.nf b/modules/khmer/normalizebymedian/main.nf new file mode 100644 index 00000000..234d172b --- /dev/null +++ b/modules/khmer/normalizebymedian/main.nf @@ -0,0 +1,49 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process KHMER_NORMALIZEBYMEDIAN { + tag "${name}" + label 'process_long' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "bioconda::khmer=3.0.0a3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/khmer:3.0.0a3--py37haa7609a_2" + } else { + container "quay.io/biocontainers/khmer:3.0.0a3--py37haa7609a_2" + } + + input: + path pe_reads + path se_reads + val name + + output: + path "${name}.fastq.gz", emit: reads + path "versions.yml" , emit: versions + + script: + pe_args = pe_reads ? "--paired" : "" + se_args = se_reads ? "--unpaired-reads ${se_reads}" : "" + files = pe_reads ? pe_reads : se_reads + + """ + normalize-by-median.py \\ + -M ${task.memory.toGiga()}e9 \\ + --gzip ${options.args} \\ + -o ${name}.fastq.gz \\ + ${pe_args} \\ + ${se_args} \\ + ${files} + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( normalize-by-median.py --version 2>&1 | grep ^khmer | sed 's/^khmer //' ) + END_VERSIONS + """ +} diff --git a/modules/khmer/normalizebymedian/meta.yml b/modules/khmer/normalizebymedian/meta.yml new file mode 100644 index 00000000..2227750f --- /dev/null +++ b/modules/khmer/normalizebymedian/meta.yml @@ -0,0 +1,39 @@ +name: khmer_normalizebymedian +description: Module that calls normalize-by-median.py from khmer. The module can take a mix of paired end (interleaved) and single end reads. If both types are provided, only a single file with single ends is possible. 
+keywords: + - digital normalization + - khmer +tools: + - khmer: + description: khmer k-mer counting library + homepage: https://github.com/dib-lab/khmer + documentation: https://khmer.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/dib-lab/khmer + doi: "https://doi.org/10.12688/f1000research.6924.1" + licence: ['BSD License'] + +input: + - pe_reads: + type: files + description: Paired-end interleaved fastq files + pattern: "*.{fq,fastq}.gz" + - se_reads: + type: files + description: Single-end fastq files + pattern: "*.{fq,fastq}.gz" + - name: + type: string + description: filename for output file(s); ".fastq.gz" will be appended + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - reads: + type: file + description: Interleaved fastq files + pattern: "*.{fq,fastq}.gz" + +authors: + - "@erikrikarddaniel" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index da9de7aa..0fd84d24 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -629,6 +629,10 @@ kallistobustools/ref: - modules/kallistobustools/ref/** - tests/modules/kallistobustools/ref/** +khmer/normalizebymedian: + - modules/khmer/normalizebymedian/** + - tests/modules/khmer/normalizebymedian/** + kleborate: - modules/kleborate/** - tests/modules/kleborate/** diff --git a/tests/modules/khmer/normalizebymedian/main.nf b/tests/modules/khmer/normalizebymedian/main.nf new file mode 100644 index 00000000..3a3b348c --- /dev/null +++ b/tests/modules/khmer/normalizebymedian/main.nf @@ -0,0 +1,85 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [:] ) +include { KHMER_NORMALIZEBYMEDIAN } from '../../../../modules/khmer/normalizebymedian/main.nf' addParams( options: [:] ) +include { KHMER_NORMALIZEBYMEDIAN as KHMER_NORMALIZEBYMEDIAN_ARGS } from '../../../../modules/khmer/normalizebymedian/main.nf' addParams( options: [args: '-C 20 -k 32'] ) + +workflow test_khmer_normalizebymedian_only_pe { + + pe_reads = [ + [ id:'khmer_test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + SEQTK_MERGEPE(pe_reads) + + KHMER_NORMALIZEBYMEDIAN ( SEQTK_MERGEPE.out.reads.collect { it[1] }, [], 'only_pe' ) +} + +workflow test_khmer_normalizebymedian_only_se { + + se_reads = [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + + KHMER_NORMALIZEBYMEDIAN ( [], se_reads, 'only_se' ) +} + +workflow test_khmer_normalizebymedian_mixed { + + pe_reads = [ + [ id:'khmer_test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + se_reads = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + + SEQTK_MERGEPE(pe_reads) + + KHMER_NORMALIZEBYMEDIAN ( SEQTK_MERGEPE.out.reads.map { it[1] }, se_reads, 'mixed' ) +} + +workflow test_khmer_normalizebymedian_multiple_pe { + + pe_reads = [ + [ id:'khmer_test0', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ], + [ id:'khmer_test1', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + se_reads = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + + SEQTK_MERGEPE(pe_reads) + + KHMER_NORMALIZEBYMEDIAN ( SEQTK_MERGEPE.out.reads.collect { it[1] }, se_reads, 'multiple_pe' ) +} + +workflow test_khmer_normalizebymedian_args { + + pe_reads = [ + [ id:'khmer_test0', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + se_reads = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + + SEQTK_MERGEPE(pe_reads) + + KHMER_NORMALIZEBYMEDIAN_ARGS ( SEQTK_MERGEPE.out.reads.collect { it[1] }, se_reads, 'args' ) +} diff --git a/tests/modules/khmer/normalizebymedian/test.yml b/tests/modules/khmer/normalizebymedian/test.yml new file mode 100644 index 00000000..a914a8ef --- /dev/null +++ b/tests/modules/khmer/normalizebymedian/test.yml @@ -0,0 +1,42 @@ +# nf-core modules create-test-yml khmer/normalizebymedian +- name: khmer normalizebymedian only pe reads + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_pe -c tests/config/nextflow.config + tags: + - khmer + - khmer/normalizebymedian + files: + - path: output/khmer/only_pe.fastq.gz + # md5sum not stable even locally with docker (gzip done by tool) + #md5sum: 75e05f2e80cf4bd0b534d4b73f7c059c + +- name: khmer normalizebymedian only se reads + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_se -c tests/config/nextflow.config + tags: + - khmer + - khmer/normalizebymedian + files: + - path: output/khmer/only_se.fastq.gz + +- name: khmer normalizebymedian mixed reads + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_mixed -c tests/config/nextflow.config + tags: + - khmer + - khmer/normalizebymedian + files: + - path: output/khmer/mixed.fastq.gz + +- name: khmer normalizebymedian multiple pe reads + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_multiple_pe -c tests/config/nextflow.config + tags: + - khmer + - khmer/normalizebymedian + files: + - path: output/khmer/multiple_pe.fastq.gz + +- name: khmer normalizebymedian args + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_args -c tests/config/nextflow.config + tags: + - khmer + - khmer/normalizebymedian + files: + - path: output/khmer/args.fastq.gz From 460a3ed87bcd918aee869256cae298457752f921 Mon Sep 17 00:00:00 2001 From: Francesco L <53608000+lescai@users.noreply.github.com> Date: Fri, 29 Oct 2021 14:00:54 +0200 Subject: [PATCH 186/314] Fgbio group reads by umi (#952) * adding template for module groupreadsbyumi * update modules with code * strategy is required argument so moving it to input rather than options.args * tests successful committing yml * added meta to output Co-authored-by: Gregor Sturm --- modules/fgbio/groupreadsbyumi/functions.nf | 78 ++++++++++++++++++++ modules/fgbio/groupreadsbyumi/main.nf | 50 +++++++++++++ 
modules/fgbio/groupreadsbyumi/meta.yml | 59 +++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/modules/fgbio/groupreadsbyumi/main.nf | 15 ++++ tests/modules/fgbio/groupreadsbyumi/test.yml | 10 +++ 6 files changed, 216 insertions(+) create mode 100644 modules/fgbio/groupreadsbyumi/functions.nf create mode 100644 modules/fgbio/groupreadsbyumi/main.nf create mode 100644 modules/fgbio/groupreadsbyumi/meta.yml create mode 100644 tests/modules/fgbio/groupreadsbyumi/main.nf create mode 100644 tests/modules/fgbio/groupreadsbyumi/test.yml diff --git a/modules/fgbio/groupreadsbyumi/functions.nf b/modules/fgbio/groupreadsbyumi/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/fgbio/groupreadsbyumi/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/fgbio/groupreadsbyumi/main.nf b/modules/fgbio/groupreadsbyumi/main.nf new file mode 100644 index 00000000..8e16f0a5 --- /dev/null +++ b/modules/fgbio/groupreadsbyumi/main.nf @@ -0,0 +1,50 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FGBIO_GROUPREADSBYUMI { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::fgbio=1.4.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0" + } else { + container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0" + } + + input: + tuple val(meta), path(taggedbam) + val(strategy) + + output: + tuple val(meta), path("*_umi-grouped.bam") , emit: bam + tuple val(meta), path("*_umi_histogram.txt"), emit: histogram + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + mkdir tmp + + fgbio \\ + --tmp-dir=${PWD}/tmp \\ + GroupReadsByUmi \\ + -s $strategy \\ + ${options.args} \\ + -i $taggedbam \\ + -o ${prefix}_umi-grouped.bam \\ + -f ${prefix}_umi_histogram.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + END_VERSIONS + """ +} diff --git a/modules/fgbio/groupreadsbyumi/meta.yml b/modules/fgbio/groupreadsbyumi/meta.yml new file mode 100644 index 00000000..18ce149e --- /dev/null +++ b/modules/fgbio/groupreadsbyumi/meta.yml @@ -0,0 +1,59 @@ +name: fgbio_groupreadsbyumi +description: | + Groups reads together that appear to have come from the same original molecule. + Reads are grouped by template, and then templates are sorted by the 5’ mapping positions + of the reads from the template, used from earliest mapping position to latest. + Reads that have the same end positions are then sub-grouped by UMI sequence. + (!) Note: the MQ tag is required on reads with mapped mates (!) + This can be added using samblaster with the optional argument --addMateTags. +keywords: + - UMI + - groupreads + - fgbio +tools: + - fgbio: + description: A set of tools for working with genomic and high throughput sequencing data, including UMIs + homepage: http://fulcrumgenomics.github.io/fgbio/ + documentation: http://fulcrumgenomics.github.io/fgbio/tools/latest/ + tool_dev_url: https://github.com/fulcrumgenomics/fgbio + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: | + BAM file. Note: the MQ tag is required on reads with mapped mates (!) 
+      pattern: "*.bam"
+  - strategy:
+      type: value
+      description: |
+        Required argument: defines the UMI assignment strategy.
+        Must be chosen among: Identity, Edit, Adjacency, Paired.
+
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - versions:
+      type: file
+      description: File containing software versions
+      pattern: "versions.yml"
+  - bam:
+      type: file
+      description: UMI-grouped BAM
+      pattern: "*.bam"
+  - histogram:
+      type: file
+      description: A text file containing the tag family size counts
+      pattern: "*.txt"
+
+authors:
+  - "@lescai"
diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml
index 0fd84d24..4fdd8303 100644
--- a/tests/config/pytest_modules.yml
+++ b/tests/config/pytest_modules.yml
@@ -394,6 +394,10 @@ fgbio/fastqtobam:
   - modules/fgbio/fastqtobam/**
   - tests/modules/fgbio/fastqtobam/**
 
+fgbio/groupreadsbyumi:
+  - modules/fgbio/groupreadsbyumi/**
+  - tests/modules/fgbio/groupreadsbyumi/**
+
 fgbio/sortbam:
   - modules/fgbio/sortbam/**
   - tests/modules/fgbio/sortbam/**
diff --git a/tests/modules/fgbio/groupreadsbyumi/main.nf b/tests/modules/fgbio/groupreadsbyumi/main.nf
new file mode 100644
index 00000000..31f55724
--- /dev/null
+++ b/tests/modules/fgbio/groupreadsbyumi/main.nf
@@ -0,0 +1,15 @@
+#!/usr/bin/env nextflow
+
+nextflow.enable.dsl = 2
+
+include { FGBIO_GROUPREADSBYUMI } from '../../../../modules/fgbio/groupreadsbyumi/main.nf' addParams( options: [:] )
+
+workflow test_fgbio_groupreadsbyumi {
+
+    input = [ [ id:'test', single_end:false ], // meta map
+              file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) ]
+
+    strategy = "Adjacency"
+
+    FGBIO_GROUPREADSBYUMI ( input, strategy )
+}
diff --git a/tests/modules/fgbio/groupreadsbyumi/test.yml b/tests/modules/fgbio/groupreadsbyumi/test.yml
new file mode 100644
index 00000000..ce70f129
--- /dev/null
+++ b/tests/modules/fgbio/groupreadsbyumi/test.yml
@@ -0,0 +1,10 @@
+- name: fgbio groupreadsbyumi test_fgbio_groupreadsbyumi
+  command: nextflow run tests/modules/fgbio/groupreadsbyumi -entry test_fgbio_groupreadsbyumi -c tests/config/nextflow.config
+  tags:
+    - fgbio
+    - fgbio/groupreadsbyumi
+  files:
+    - path: output/fgbio/test_umi-grouped.bam
+      md5sum: f1e53fc845fd99a3da172eb8063dff0b
+    - path: output/fgbio/test_umi_histogram.txt
+      md5sum: d17fd167b2a765d46e4b01bf08ece01b

From 2959b4ba070d138d1577acc48d35cc669fbef972 Mon Sep 17 00:00:00 2001
From: Chris Cheshire
Date: Fri, 29 Oct 2021 13:22:17 +0100
Subject: [PATCH 187/314] Bedtools sort add extension choice input (#984)

* hifiasm copied from fastqc
* hifiasm tests init from fastqc
* meta.yml init; test.yml and main.nf for printing version
* Add hifiasm version printing
* Removed spaced on an empty line
* Reverted hifiasm from main
* Added extension input for bedtools sort
* whitespace
* Updated docs

Co-authored-by: Sviatoslav Sidorov
Co-authored-by: Svyatoslav Sidorov
---
 modules/bedtools/sort/main.nf        | 11 ++++++-----
 modules/bedtools/sort/meta.yml       | 19 +++++++++++++------
 tests/modules/bedtools/sort/main.nf  |  2 +-
 tests/modules/bedtools/sort/test.yml |  2 +-
 4 files changed, 21 insertions(+), 13 deletions(-)

diff --git a/modules/bedtools/sort/main.nf b/modules/bedtools/sort/main.nf
index bdba3376..4a51c4b2 100644
--- a/modules/bedtools/sort/main.nf
+++ b/modules/bedtools/sort/main.nf
@@ -19,20 +19,21 @@ process BEDTOOLS_SORT {
     }
 
     input:
-    tuple val(meta), path(bed)
+    tuple val(meta), path(intervals)
+    val   extension
 
     output:
-    tuple val(meta), path('*.bed'), emit: bed
-    path "versions.yml"           , emit: versions
+    tuple val(meta), path("*.${extension}"), emit: sorted
+    path "versions.yml"                    , emit: versions
 
     script:
     def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
     """
     bedtools \\
         sort \\
-        -i $bed \\
+        -i $intervals \\
         $options.args \\
-        > ${prefix}.bed
+        > ${prefix}.${extension}
 
     cat <<-END_VERSIONS > versions.yml
     ${getProcessName(task.process)}:
diff --git a/modules/bedtools/sort/meta.yml b/modules/bedtools/sort/meta.yml
index 5b8b41d7..c7b1b098 100644
--- a/modules/bedtools/sort/meta.yml
+++ b/modules/bedtools/sort/meta.yml
@@ -15,20 +15,26 @@ input:
       description: |
         Groovy Map containing sample information
         e.g. [ id:'test', single_end:false ]
-  - bed:
+  - intervals:
       type: file
-      description: Input BED file
-      pattern: "*.{bed}"
+      description: BED/BEDGRAPH input file
+      pattern: "*.{bed,bedGraph}"
+
+  - extension:
+      type: string
+      description: Extension of the output file (e.g. ".bg", ".bedgraph", ".txt", ".tab"). It is set by the user and should match the file format produced by the given arguments.
 output:
   - meta:
       type: map
       description: |
         Groovy Map containing sample information
         e.g. [ id:'test', single_end:false ]
-  - bed:
+
+  - sorted:
       type: file
-      description: Sorted BED file
-      pattern: "*.{bed}"
+      description: Sorted output file
+      pattern: "*.${extension}"
+
   - versions:
       type: file
       description: File containing software versions
@@ -37,3 +43,4 @@ authors:
   - "@Emiller88"
   - "@sruthipsuresh"
   - "@drpatelh"
+  - "@chris-cheshire"
diff --git a/tests/modules/bedtools/sort/main.nf b/tests/modules/bedtools/sort/main.nf
index ad1a3df4..b5d34e2f 100644
--- a/tests/modules/bedtools/sort/main.nf
+++ b/tests/modules/bedtools/sort/main.nf
@@ -9,5 +9,5 @@ workflow test_bedtools_sort {
               file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) ]
 
-    BEDTOOLS_SORT ( input )
+    BEDTOOLS_SORT ( input, "testext" )
 }
diff --git a/tests/modules/bedtools/sort/test.yml b/tests/modules/bedtools/sort/test.yml
index ceb25f7d..1dd04507 100644
--- a/tests/modules/bedtools/sort/test.yml
+++ b/tests/modules/bedtools/sort/test.yml
@@ -4,5 +4,5 @@
     - bedtools
     - bedtools/sort
   files:
-    - path: ./output/bedtools/test_out.bed
+    - path: ./output/bedtools/test_out.testext
      md5sum: fe4053cf4de3aebbdfc3be2efb125a74

From 977d96ed0bd7d813b6f5498e9423d70a02e190ac Mon Sep 17 00:00:00 2001
From: avantonder
Date: Fri, 29 Oct 2021 13:33:38 +0100
Subject: [PATCH 188/314] Add Racon module to nf-core/modules (#949)

* add racon
* add racon
* add racon
* add racon module
* add racon module
* edit racon module
* edit racon module
* edit racon module
* edit racon module

Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com>
Co-authored-by: Chris Cheshire
---
 modules/racon/functions.nf            | 78 +++++++++++++++++++++++++++
 modules/racon/main.nf                 | 45 ++++++++++++++++
 modules/racon/meta.yml                | 52 ++++++++++++++++++
 tests/config/pytest_modules.yml       |  4 ++
 tests/config/test_data.config         |  5 +-
 tests/modules/minimap2/align/test.yml |  2 +-
 tests/modules/racon/main.nf           | 15 ++++++
 tests/modules/racon/test.yml          |  7 +++
 8 files changed, 206 insertions(+), 2 deletions(-)
 create mode 100644 modules/racon/functions.nf
 create mode 100644 modules/racon/main.nf
 create mode 100644 modules/racon/meta.yml
 create mode 100644 tests/modules/racon/main.nf
 create mode 100644 tests/modules/racon/test.yml

diff --git a/modules/racon/functions.nf b/modules/racon/functions.nf
new file mode 100644
index 00000000..85628ee0
---
/dev/null +++ b/modules/racon/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/racon/main.nf b/modules/racon/main.nf new file mode 100644 index 00000000..60a5061e --- /dev/null +++ b/modules/racon/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process RACON { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::racon=1.4.20" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/racon:1.4.20--h9a82719_1" + } else { + container "quay.io/biocontainers/racon:1.4.20--h9a82719_1" + } + + input: + tuple val(meta), path(reads), path(assembly), path(paf) + + output: + tuple val(meta), path('*_assembly_consensus.fasta.gz') , emit: improved_assembly + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + racon -t "${task.cpus}" \\ + "${reads}" \\ + "${paf}" \\ + $options.args \\ + "${assembly}" > \\ + ${prefix}_assembly_consensus.fasta + + gzip -n ${prefix}_assembly_consensus.fasta + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( racon --version 2>&1 | sed 's/^.*v//' ) + END_VERSIONS + """ +} diff --git a/modules/racon/meta.yml b/modules/racon/meta.yml new file mode 100644 index 00000000..2428f044 --- /dev/null +++ b/modules/racon/meta.yml @@ -0,0 +1,52 @@ +name: racon +description: Consensus module for raw de novo DNA assembly of long uncorrected reads +keywords: + - assembly + - pacbio + - nanopore + - polish +tools: + - racon: + description: Ultrafast consensus module for raw de novo genome assembly of long uncorrected reads. + homepage: https://github.com/lbcb-sci/racon + documentation: https://github.com/lbcb-sci/racon + tool_dev_url: https://github.com/lbcb-sci/racon + doi: https://doi.org/10.1101/gr.214270.116 + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: List of input FastQ files. Racon expects single end reads + pattern: "*.{fastq,fastq.gz,fq,fq.gz}" + - assembly: + type: file + description: Genome assembly to be improved + pattern: "*.{fasta,fa}" + - paf: + type: file + description: Alignment in PAF format + pattern: "*.paf" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - improved_assembly: + type: file + description: Improved genome assembly + pattern: "*_assembly_consensus.fasta.gz" + +authors: + - "@avantonder" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4fdd8303..155ed78d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -923,6 +923,10 @@ quast: - modules/quast/** - tests/modules/quast/** +racon: + - modules/racon/** + - tests/modules/racon/** + rapidnj: - modules/rapidnj/** - tests/modules/rapidnj/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index e8729b9b..c05e1c8f 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -11,6 +11,7 @@ params { genome_gff3 = "${test_data_dir}/genomics/sarscov2/genome/genome.gff3" genome_gff3_gz = "${test_data_dir}/genomics/sarscov2/genome/genome.gff3.gz" genome_gtf = "${test_data_dir}/genomics/sarscov2/genome/genome.gtf" + genome_paf = "${test_data_dir}/genomics/sarscov2/genome/genome.paf" genome_sizes = "${test_data_dir}/genomics/sarscov2/genome/genome.sizes" transcriptome_fasta = "${test_data_dir}/genomics/sarscov2/genome/transcriptome.fasta" transcriptome_paf = "${test_data_dir}/genomics/sarscov2/genome/transcriptome.paf" @@ -243,6 +244,7 @@ params { 'bacteroides_fragilis'{ 'genome' { genome_fna_gz = "${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.fna.gz" + genome_paf = "${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.paf" } 'illumina' { test1_contigs_fa_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" @@ -253,7 +255,8 @@ params { } 'nanopore' { test_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" + overlap_paf = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/overlap.paf" } - } + } } } diff --git a/tests/modules/minimap2/align/test.yml b/tests/modules/minimap2/align/test.yml index f9b762bb..3309bf4b 100644 --- a/tests/modules/minimap2/align/test.yml +++ b/tests/modules/minimap2/align/test.yml @@ -14,4 +14,4 @@ - minimap2/align files: - path: ./output/minimap2/test.paf - md5sum: 5e7b55a26bf0ea3a2843423d3e0b9a28 + md5sum: 5e7b55a26bf0ea3a2843423d3e0b9a28 \ No newline at end of file diff --git a/tests/modules/racon/main.nf b/tests/modules/racon/main.nf new file mode 100644 index 00000000..b6b864e1 --- /dev/null +++ b/tests/modules/racon/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { RACON } from '../../../modules/racon/main.nf' addParams( options: [:] ) + +workflow test_racon { + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['bacteroides_fragilis']['nanopore']['test_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['genome']['genome_paf'], checkIfExists: true) + ] + + RACON ( input ) +} \ No newline at end of file diff --git a/tests/modules/racon/test.yml b/tests/modules/racon/test.yml new file mode 100644 index 00000000..dc8e57dc --- /dev/null +++ b/tests/modules/racon/test.yml @@ -0,0 +1,7 @@ +- name: racon test_racon + command: nextflow run tests/modules/racon -entry test_racon -c tests/config/nextflow.config + tags: + - racon + files: + - path: output/racon/test_assembly_consensus.fasta.gz + md5sum: 
96a0ba94c6154f6f37b5a76a0207eb6f From a4943a9e572ac2a5721f598b7bca059633408481 Mon Sep 17 00:00:00 2001 From: louperelo <44900284+louperelo@users.noreply.github.com> Date: Fri, 29 Oct 2021 15:02:02 +0200 Subject: [PATCH 189/314] Mtnucratio (#986) * add new module samtools_depth * fixed main.nf for samtools/depth * add new module mtnucratio * fix main.nf * Apply suggestions from code review Co-authored-by: James A. Fellows Yates * Apply suggestions from code review Co-authored-by: James A. Fellows Yates * layout in main.nf Co-authored-by: James A. Fellows Yates --- modules/mtnucratio/functions.nf | 78 +++++++++++++++++++++++++++++++ modules/mtnucratio/main.nf | 43 +++++++++++++++++ modules/mtnucratio/meta.yml | 54 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/mtnucratio/main.nf | 14 ++++++ tests/modules/mtnucratio/test.yml | 9 ++++ 6 files changed, 202 insertions(+) create mode 100644 modules/mtnucratio/functions.nf create mode 100644 modules/mtnucratio/main.nf create mode 100644 modules/mtnucratio/meta.yml create mode 100644 tests/modules/mtnucratio/main.nf create mode 100644 tests/modules/mtnucratio/test.yml diff --git a/modules/mtnucratio/functions.nf b/modules/mtnucratio/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/mtnucratio/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/mtnucratio/main.nf b/modules/mtnucratio/main.nf new file mode 100644 index 00000000..28d08a13 --- /dev/null +++ b/modules/mtnucratio/main.nf @@ -0,0 +1,43 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MTNUCRATIO { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::mtnucratio=0.7" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mtnucratio:0.7--hdfd78af_2" + } else { + container "quay.io/biocontainers/mtnucratio:0.7--hdfd78af_2" + } + + input: + tuple val(meta), path(bam) + val(mt_id) + + output: + tuple val(meta), path("*.mtnucratio"), emit: mtnucratio + tuple val(meta), path("*.json") , emit: json + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + mtnucratio \\ + $options.args \\ + $bam \\ + $mt_id + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(mtnucratio --version 2>&1) | head -n1 | sed 's/Version: //') + END_VERSIONS + """ +} diff --git a/modules/mtnucratio/meta.yml b/modules/mtnucratio/meta.yml new file mode 100644 index 00000000..824af397 --- /dev/null +++ b/modules/mtnucratio/meta.yml @@ -0,0 +1,54 @@ +name: mtnucratio +description: A small Java tool to calculate ratios between MT and nuclear sequencing reads in a given BAM file. +keywords: + - mtnucratio + - ratio + - reads + - bam + - mitochondrial to nuclear ratio + - mitochondria + - statistics +tools: + - mtnucratio: + description: A small tool to determine MT to Nuclear ratios for NGS data. + homepage: https://github.com/apeltzer/MTNucRatioCalculator + documentation: https://github.com/apeltzer/MTNucRatioCalculator + tool_dev_url: https://github.com/apeltzer/MTNucRatioCalculator + doi: "10.1186/s13059-016-0918-z" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: (coordinate) sorted BAM/SAM file + pattern: "*.{bam,sam}" + - mt_id: + type: string + description: Identifier of the contig/chromosome of interest (e.g. chromosome, contig) as in the aligned against reference FASTA file, e.g. mt or chrMT for mitochondria + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - mtnucratio: + type: file + description: Text file containing metrics (mtreads, mt_cov_avg, nucreads, nuc_cov_avg, mt_nuc_ratio) + pattern: "*.mtnucratio" + - json: + type: file + description: JSON file, containing metadata map with sample name, tool name and version, and metrics as in txt file + pattern: "*.json" + +authors: + - "@louperelo" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 155ed78d..c0936a81 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -778,6 +778,10 @@ msisensor/scan: - modules/msisensor/scan/** - tests/modules/msisensor/scan/** +mtnucratio: + - modules/mtnucratio/** + - tests/modules/mtnucratio/** + multiqc: - modules/fastqc/** - modules/multiqc/** diff --git a/tests/modules/mtnucratio/main.nf b/tests/modules/mtnucratio/main.nf new file mode 100644 index 00000000..dd9fc9db --- /dev/null +++ b/tests/modules/mtnucratio/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MTNUCRATIO } from '../../../modules/mtnucratio/main.nf' addParams( options: [:] ) + +workflow test_mtnucratio { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true)] + mt_id = 'mt_id' + + MTNUCRATIO ( input, mt_id ) +} diff --git a/tests/modules/mtnucratio/test.yml b/tests/modules/mtnucratio/test.yml new file mode 100644 index 00000000..76cbaf32 --- /dev/null +++ b/tests/modules/mtnucratio/test.yml @@ -0,0 +1,9 @@ +- name: mtnucratio + command: nextflow run tests/modules/mtnucratio -entry test_mtnucratio -c tests/config/nextflow.config + tags: + - mtnucratio + files: + - path: output/mtnucratio/test.single_end.sorted.bam.mtnucratio + md5sum: 19e96849802c70aa0694785f716274b7 + - path: output/mtnucratio/test.single_end.sorted.bam.mtnucratiomtnuc.json + md5sum: 14d24be6272854d6762f0dfad5918ef6 From 0a7368aa6f89a63229f5e04fa937851348e16ccf Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Fri, 29 Oct 2021 16:37:39 +0200 Subject: [PATCH 190/314] Refactor genrich to avoid params in main (#981) * Refactor genrich to avoid params in main * Missed to commit test.yml file --- modules/genrich/main.nf | 19 +++++++----- modules/genrich/meta.yml | 15 ++++++++-- tests/modules/genrich/main.nf | 53 ++++++++++++++++++++++++++++------ tests/modules/genrich/test.yml | 20 +++++++++---- 4 files changed, 81 insertions(+), 26 deletions(-) diff --git a/modules/genrich/main.nf b/modules/genrich/main.nf index c947e9cf..f34f9cd2 100644 --- a/modules/genrich/main.nf +++ b/modules/genrich/main.nf @@ -22,6 +22,10 @@ process GENRICH { tuple val(meta), path(treatment_bam) path control_bam path blacklist_bed + val save_pvalues + val save_pileup + val save_bed + val save_duplicates output: tuple val(meta), path("*narrowPeak") , emit: peaks @@ -32,14 +36,14 @@ process GENRICH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def control = params.control_bam ? "-c $control_bam" : '' - def pvalues = params.pvalues ? "-f ${prefix}.pvalues.bedGraph" : "" - def pileup = params.pileup ? "-k ${prefix}.pileup.bedGraph" : "" - def bed = params.bed ? "-b ${prefix}.intervals.bed" : "" - def blacklist = params.blacklist_bed ? "-E $blacklist_bed" : "" + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def control = control_bam ? "-c $control_bam" : '' + def blacklist = blacklist_bed ? "-E $blacklist_bed" : "" + def pvalues = save_pvalues ? "-f ${prefix}.pvalues.bedGraph" : "" + def pileup = save_pileup ? "-k ${prefix}.pileup.bedGraph" : "" + def bed = save_bed ? "-b ${prefix}.intervals.bed" : "" def duplicates = "" - if (params.save_duplicates) { + if (save_duplicates) { if (options.args.contains('-r')) { duplicates = "-R ${prefix}.duplicates.txt" } else { @@ -58,7 +62,6 @@ process GENRICH { $pileup \\ $bed \\ $duplicates \\ - $blacklist \\ $control cat <<-END_VERSIONS > versions.yml diff --git a/modules/genrich/meta.yml b/modules/genrich/meta.yml index 8f7b004b..37184190 100644 --- a/modules/genrich/meta.yml +++ b/modules/genrich/meta.yml @@ -15,7 +15,6 @@ tools: tool_dev_url: https://github.com/jsh58/Genrich doi: "" licence: ['MIT'] - input: - meta: type: map @@ -34,7 +33,18 @@ input: type: file description: Bed file containing genomic intervals to exclude from the analysis pattern: "*.{bed}" - + - save_pvalues: + type: boolean + description: Create bedgraph-ish file for p/q-values file + - save_pileup: + type: boolean + description: Create bedgraph-ish file for pileups and p-values + - save_bed: + type: boolean + description: Create BED file for reads/fragments/intervals + - save_duplicates: + type: boolean + description: Create PCR duplicates file (only works if -r option is set) output: - meta: type: map @@ -65,7 +75,6 @@ output: type: file description: File containing software version pattern: "*.{version.txt}" - authors: - "@JoseEspinosa" diff --git a/tests/modules/genrich/main.nf b/tests/modules/genrich/main.nf index 654b38e5..aa1a2d49 100644 --- a/tests/modules/genrich/main.nf +++ b/tests/modules/genrich/main.nf @@ -2,10 +2,10 @@ nextflow.enable.dsl = 2 -include { GENRICH } from '../../../modules/genrich/main.nf' addParams( control_bam: false, pvalues: false, pileup:false, bed:false, blacklist_bed:false, save_duplicates:false, options: ["args": "-p 0.1"] ) -include { GENRICH as GENRICH_BLACKLIST } from '../../../modules/genrich/main.nf' addParams( control_bam: false, pvalues: false, pileup:false, bed:false, blacklist_bed:true, save_duplicates:false, options: ["args": "-p 0.1"] ) -include { GENRICH as GENRICH_ALL_OUTPUTS } from '../../../modules/genrich/main.nf' addParams( control_bam: false, pvalues: true, pileup:true, bed:true, blacklist_bed:false, save_duplicates:true, options: ["args": "-r -p 0.1"] ) -include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' addParams( control_bam: false, pvalues: false, pileup:false, bed:false, blacklist_bed:false, save_duplicates:false, options: ["args": "-j -p 0.1"] ) +include { GENRICH } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-p 0.1"] ) +include { GENRICH as GENRICH_CTRL } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-p 0.9"] ) +include { GENRICH as GENRICH_ALL } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-r -p 0.1"] ) +include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-j -p 0.1"] ) workflow test_genrich { input = [ [ id:'test', single_end:false ], // meta map @@ -13,7 +13,12 @@ workflow test_genrich { control = [ ] blacklist = [ ] - GENRICH ( input, control, blacklist ) + save_pvalues = false + save_pileup = false + save_bed = false + save_duplicates = false + + GENRICH ( input, control, blacklist, save_pvalues, save_pileup, 
save_bed, save_duplicates ) } workflow test_genrich_ctrl { @@ -22,7 +27,12 @@ workflow test_genrich_ctrl { control = [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ] blacklist = [ ] - GENRICH ( input, control, blacklist ) + save_pvalues = false + save_pileup = false + save_bed = false + save_duplicates = false + + GENRICH_CTRL ( input, control, blacklist, save_pvalues, save_pileup, save_bed, save_duplicates ) } workflow test_genrich_all_outputs { @@ -31,14 +41,39 @@ workflow test_genrich_all_outputs { control = [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ] blacklist = [ ] - GENRICH_ALL_OUTPUTS ( input, control, blacklist ) + save_pvalues = true + save_pileup = true + save_bed = true + save_duplicates = true + + GENRICH_ALL ( input, control, blacklist, save_pvalues, save_pileup, save_bed, save_duplicates ) +} + +workflow test_genrich_blacklist { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ]] + control = [ ] + blacklist = [ file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)] + + save_pvalues = false + save_pileup = false + save_bed = false + save_duplicates = false + + GENRICH ( input, control, blacklist, save_pvalues, save_pileup, save_bed, save_duplicates ) } workflow test_genrich_atacseq { input = [ [ id:'test', single_end:false ], // meta map [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ]] - control = [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ] + control = [ ] blacklist = [ ] - GENRICH_ATACSEQ ( input, control, blacklist ) + save_pvalues = false + save_pileup = false + save_bed = false + save_duplicates = false + + GENRICH_ATACSEQ ( input, control, blacklist, save_pvalues, save_pileup, save_bed, save_duplicates ) } + diff --git a/tests/modules/genrich/test.yml b/tests/modules/genrich/test.yml index bd762f7c..63bf2927 100644 --- a/tests/modules/genrich/test.yml +++ b/tests/modules/genrich/test.yml @@ -12,7 +12,7 @@ - genrich files: - path: output/genrich/test.narrowPeak - md5sum: 6afabdd3f691c7c84c66ff8a23984681 + md5sum: 2fcc392360b317f5ebee88cdbc149e05 - name: genrich test_genrich_all_outputs command: nextflow run tests/modules/genrich -entry test_genrich_all_outputs -c tests/config/nextflow.config @@ -20,15 +20,23 @@ - genrich files: - path: output/genrich/test.duplicates.txt - md5sum: a92893f905fd8b3751bc6a960fbfe7ba + md5sum: 159d557af7c23bc3cfb802d87fa96c34 - path: output/genrich/test.intervals.bed - md5sum: 52edf47e6641c0cc03f9cca7324f7eaa + md5sum: 4bea65caa3f4043d703af4b57161112e - path: output/genrich/test.narrowPeak - md5sum: e45eb7d000387975050c2e85c164e5be + md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/genrich/test.pileup.bedGraph - md5sum: e4f7fa664cd4ed2cf3a1a3a9eb415e71 + md5sum: 03e53848de695b5794f32f15b2709203 - path: output/genrich/test.pvalues.bedGraph - md5sum: 564859953704983393d4b7d6317060cd + md5sum: b14feef34b6d2379a173a734ca963cde + +- name: genrich test_genrich_blacklist + command: nextflow run tests/modules/genrich -entry test_genrich_blacklist -c tests/config/nextflow.config + tags: + - genrich + files: + - path: output/genrich/test.narrowPeak + md5sum: 6afabdd3f691c7c84c66ff8a23984681 - name: genrich test_genrich_atacseq command: nextflow 
run tests/modules/genrich -entry test_genrich_atacseq -c tests/config/nextflow.config From 3df4fe60851c19ec8d55ef3a978ae851eaccfd68 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 29 Oct 2021 17:05:06 +0200 Subject: [PATCH 191/314] add freebayes/somatic and update freebayes/germline (#990) * add freebayes/somatic and update freebayes/germline to new syntax and cram * add pytest * update with new freebayes/germline path --- modules/freebayes/{ => germline}/functions.nf | 0 modules/freebayes/{ => germline}/main.nf | 31 ++++---- modules/freebayes/{ => germline}/meta.yml | 10 +-- modules/freebayes/somatic/functions.nf | 78 +++++++++++++++++++ modules/freebayes/somatic/main.nf | 74 ++++++++++++++++++ modules/freebayes/somatic/meta.yml | 66 ++++++++++++++++ modules/gatk4/applybqsr/meta.yml | 1 + tests/config/pytest_modules.yml | 10 ++- tests/modules/freebayes/germline/main.nf | 51 ++++++++++++ tests/modules/freebayes/germline/test.yml | 26 +++++++ tests/modules/freebayes/main.nf | 35 --------- tests/modules/freebayes/somatic/main.nf | 37 +++++++++ tests/modules/freebayes/somatic/test.yml | 17 ++++ tests/modules/freebayes/test.yml | 14 ---- 14 files changed, 378 insertions(+), 72 deletions(-) rename modules/freebayes/{ => germline}/functions.nf (100%) rename modules/freebayes/{ => germline}/main.nf (75%) rename modules/freebayes/{ => germline}/meta.yml (95%) create mode 100644 modules/freebayes/somatic/functions.nf create mode 100644 modules/freebayes/somatic/main.nf create mode 100644 modules/freebayes/somatic/meta.yml create mode 100644 tests/modules/freebayes/germline/main.nf create mode 100644 tests/modules/freebayes/germline/test.yml delete mode 100644 tests/modules/freebayes/main.nf create mode 100644 tests/modules/freebayes/somatic/main.nf create mode 100644 tests/modules/freebayes/somatic/test.yml delete mode 100644 tests/modules/freebayes/test.yml diff --git a/modules/freebayes/functions.nf b/modules/freebayes/germline/functions.nf similarity index 100% rename from modules/freebayes/functions.nf rename to modules/freebayes/germline/functions.nf diff --git a/modules/freebayes/main.nf b/modules/freebayes/germline/main.nf similarity index 75% rename from modules/freebayes/main.nf rename to modules/freebayes/germline/main.nf index 63235d8a..eae62036 100644 --- a/modules/freebayes/main.nf +++ b/modules/freebayes/germline/main.nf @@ -4,7 +4,7 @@ include { initOptions; saveFiles; getProcessName; getSoftwareName } from './func params.options = [:] options = initOptions(params.options) -process FREEBAYES { +process FREEBAYES_GERMLINE { tag "$meta.id" label 'process_low' publishDir "${params.outdir}", @@ -19,24 +19,25 @@ process FREEBAYES { } input: - tuple val(meta), path(bam), path(bai) - tuple path(fasta), path(fai) - path(targets) - path(samples) - path(populations) - path(cnv) - + tuple val(meta), path(input), path(input_index) + path fasta + path fai + path targets + path samples + path populations + path cnv output: tuple val(meta), path("*.vcf.gz") , emit: vcf path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def targets_file = targets ? "--target ${targets}" : "" - def samples_file = samples ? "--samples ${samples}" : "" - def populations_file = populations ? "--populations ${populations}" : "" - def cnv_file = cnv ? "--cnv-map ${cnv}" : "" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def targets_file = targets ? "--target ${targets}" : "" + def samples_file = samples ? 
"--samples ${samples}" : "" + def populations_file = populations ? "--populations ${populations}" : "" + def cnv_file = cnv ? "--cnv-map ${cnv}" : "" + if (task.cpus > 1) { """ freebayes-parallel \\ @@ -47,7 +48,7 @@ process FREEBAYES { $populations_file \\ $cnv_file \\ $options.args \\ - $bam > ${prefix}.vcf + $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf @@ -66,7 +67,7 @@ process FREEBAYES { $populations_file \\ $cnv_file \\ $options.args \\ - $bam > ${prefix}.vcf + $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf diff --git a/modules/freebayes/meta.yml b/modules/freebayes/germline/meta.yml similarity index 95% rename from modules/freebayes/meta.yml rename to modules/freebayes/germline/meta.yml index 46eb5309..86650715 100644 --- a/modules/freebayes/meta.yml +++ b/modules/freebayes/germline/meta.yml @@ -1,4 +1,4 @@ -name: freebayes +name: freebayes_germline description: A haplotype-based variant detector keywords: - variant caller @@ -12,7 +12,7 @@ tools: homepage: https://github.com/freebayes/freebayes documentation: https://github.com/freebayes/freebayes tool_dev_url: https://github.com/freebayes/freebayes - doi: "" + doi: "arXiv:1207.3907" licence: ['MIT'] input: @@ -21,11 +21,11 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - bam: + - input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - bai: + - input_index: type: file description: BAM/CRAM/SAM index file pattern: "*.bam.bai" @@ -59,7 +59,6 @@ input: seq_name start end sample_name copy_number pattern: "*.bed" - output: - meta: type: map @@ -76,3 +75,4 @@ output: pattern: "*.vcf.gz" authors: - "@maxibor" + - "@FriederikeHanssen" diff --git a/modules/freebayes/somatic/functions.nf b/modules/freebayes/somatic/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/freebayes/somatic/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + 
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/freebayes/somatic/main.nf b/modules/freebayes/somatic/main.nf new file mode 100644 index 00000000..c1579661 --- /dev/null +++ b/modules/freebayes/somatic/main.nf @@ -0,0 +1,74 @@ +// Import generic module functions +include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FREEBAYES_SOMATIC { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::freebayes=1.3.5" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3" + } else { + container "quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3" + } + + input: + tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor) + path fasta + path fai + path targets + path samples + + output: + tuple val(meta), path("*.vcf.gz") , emit: vcf + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def targets_file = targets ? "--target ${targets}" : "" + def samples_file = samples ? 
"--samples ${samples}" : "" + + if (task.cpus > 1) { + """ + freebayes-parallel \\ + <(fasta_generate_regions.py ${fasta}.fai 10000) ${task.cpus} \\ + -f $fasta \\ + $targets_file \\ + $samples_file \\ + $options.args \\ + $input_tumor \\ + $input_normal > ${prefix}.vcf + + gzip --no-name ${prefix}.vcf + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + END_VERSIONS + """ + + } else { + """ + freebayes \\ + -f $fasta \\ + $targets_file \\ + $samples_file \\ + $options.args \\ + $input_tumor \\ + $input_normal > ${prefix}.vcf + + gzip --no-name ${prefix}.vcf + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + END_VERSIONS + """ + } +} diff --git a/modules/freebayes/somatic/meta.yml b/modules/freebayes/somatic/meta.yml new file mode 100644 index 00000000..391e5007 --- /dev/null +++ b/modules/freebayes/somatic/meta.yml @@ -0,0 +1,66 @@ +name: freebayes_somatic +description: A haplotype-based variant detector +keywords: + - variant caller + - SNP + - genotyping + - somatic variant calling + - bayesian + +tools: + - freebayes: + description: Bayesian haplotype-based polymorphism discovery and genotyping + homepage: https://github.com/freebayes/freebayes + documentation: https://github.com/freebayes/freebayes + tool_dev_url: https://github.com/freebayes/freebayes + doi: "arXiv:1207.3907" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - input_index: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.bam.bai" + - fasta: + type: file + description: reference fasta file + pattern: ".{fa,fa.gz,fasta,fasta.gz}" + - fai: + type: file + description: reference fasta file index + pattern: "*.fai" + - targets: + type: file + description: Optional - Limit analysis to targets listed in this BED-format FILE. + pattern: "*.bed" + - samples: + type: file + description: Optional - Limit analysis to samples listed (one per line) in the FILE. + pattern: "*.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" + - vcf: + type: file + description: Compressed VCF file + pattern: "*.vcf.gz" + +authors: + - "@FriederikeHanssen" diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index b002dca6..e7419860 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -61,3 +61,4 @@ output: authors: - "@yocra3" + - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index c0936a81..d7fed0c0 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -410,9 +410,13 @@ flash: - modules/flash/** - tests/modules/flash/** -freebayes: - - modules/freebayes/** - - tests/modules/freebayes/** +freebayes/germline: + - modules/freebayes/germline/** + - tests/modules/freebayes/germline/** + +freebayes/somatic: + - modules/freebayes/somatic/** + - tests/modules/freebayes/somatic/** gatk4/applybqsr: - modules/gatk4/applybqsr/** diff --git a/tests/modules/freebayes/germline/main.nf b/tests/modules/freebayes/germline/main.nf new file mode 100644 index 00000000..1b39eea0 --- /dev/null +++ b/tests/modules/freebayes/germline/main.nf @@ -0,0 +1,51 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FREEBAYES_GERMLINE } from '../../../../modules/freebayes/germline/main.nf' addParams( options: [:] ) + +workflow test_freebayes { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = [] + samples = [] + populations = [] + cnv = [] + + FREEBAYES_GERMLINE ( input, fasta, fai, targets, samples, populations, cnv) +} + +workflow test_freebayes_bed { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + samples = [] + populations = [] + cnv = [] + + FREEBAYES_GERMLINE ( input, fasta, fai, targets, samples, populations, cnv) +} + +workflow test_freebayes_cram { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = [] + samples = [] + populations = [] + cnv = [] + + FREEBAYES_GERMLINE ( input, fasta, fai, targets, samples, populations, cnv) +} diff --git a/tests/modules/freebayes/germline/test.yml b/tests/modules/freebayes/germline/test.yml new file mode 
100644 index 00000000..55925b92 --- /dev/null +++ b/tests/modules/freebayes/germline/test.yml @@ -0,0 +1,26 @@ +- name: freebayes germline test_freebayes + command: nextflow run tests/modules/freebayes/germline -entry test_freebayes -c tests/config/nextflow.config + tags: + - freebayes + - freebayes/germline + files: + - path: output/freebayes/test.vcf.gz + md5sum: 1ec210ad27514c7a4140c924dc66d979 + +- name: freebayes germline test_freebayes_bed + command: nextflow run tests/modules/freebayes/germline -entry test_freebayes_bed -c tests/config/nextflow.config + tags: + - freebayes + - freebayes/germline + files: + - path: output/freebayes/test.vcf.gz + md5sum: e8923cccd5dac196f72d3d3997a60706 + +- name: freebayes germline test_freebayes_cram + command: nextflow run tests/modules/freebayes/germline -entry test_freebayes_cram -c tests/config/nextflow.config + tags: + - freebayes + - freebayes/germline + files: + - path: output/freebayes/test.vcf.gz + md5sum: cb57a3ed154618e3aa4a5272fcfb7521 diff --git a/tests/modules/freebayes/main.nf b/tests/modules/freebayes/main.nf deleted file mode 100644 index 1c07b821..00000000 --- a/tests/modules/freebayes/main.nf +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { FREEBAYES } from '../../../modules/freebayes/main.nf' addParams( options: [:] ) - -workflow test_freebayes { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] - reference = [file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)] - targets = [] - samples = [] - populations = [] - cnv = [] - - FREEBAYES ( input, reference, targets, samples, populations, cnv) -} - -workflow test_freebayes_bed { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] - reference = [file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)] - targets = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) - samples = [] - populations = [] - cnv = [] - - FREEBAYES ( input, reference, targets, samples, populations, cnv) -} diff --git a/tests/modules/freebayes/somatic/main.nf b/tests/modules/freebayes/somatic/main.nf new file mode 100644 index 00000000..d26caf34 --- /dev/null +++ b/tests/modules/freebayes/somatic/main.nf @@ -0,0 +1,37 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FREEBAYES_SOMATIC } from '../../../../modules/freebayes/somatic/main.nf' addParams( options: [:] ) + +workflow test_freebayes { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam_bai'], checkIfExists: true) + 
] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = [] + samples = [] + + FREEBAYES_SOMATIC ( input, fasta, fai, targets, samples) +} + +workflow test_freebayes_intervals { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + samples = [] + + FREEBAYES_SOMATIC ( input, fasta, fai, targets, samples) +} diff --git a/tests/modules/freebayes/somatic/test.yml b/tests/modules/freebayes/somatic/test.yml new file mode 100644 index 00000000..93113e60 --- /dev/null +++ b/tests/modules/freebayes/somatic/test.yml @@ -0,0 +1,17 @@ +- name: freebayes somatic test_freebayes + command: nextflow run tests/modules/freebayes/somatic -entry test_freebayes -c tests/config/nextflow.config + tags: + - freebayes/somatic + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: 1c47d02f27ec5918558c8688ce6e7780 + +- name: freebayes somatic test_freebayes_intervals + command: nextflow run tests/modules/freebayes/somatic -entry test_freebayes_intervals -c tests/config/nextflow.config + tags: + - freebayes/somatic + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: 5b8a12666bde63746dcec7afcd3ef789 diff --git a/tests/modules/freebayes/test.yml b/tests/modules/freebayes/test.yml deleted file mode 100644 index 9ca54021..00000000 --- a/tests/modules/freebayes/test.yml +++ /dev/null @@ -1,14 +0,0 @@ -- name: freebayes test_freebayes - command: nextflow run tests/modules/freebayes -entry test_freebayes -c tests/config/nextflow.config - tags: - - freebayes - files: - - path: output/freebayes/test.vcf.gz - -- name: freebayes test_freebayes_bed - command: nextflow run tests/modules/freebayes -entry test_freebayes_bed -c tests/config/nextflow.config - tags: - - freebayes - files: - - path: output/freebayes/test.vcf.gz - From 7afb962f0bd5d9b504331082fa4bafd498074a4b Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Fri, 29 Oct 2021 17:21:34 +0100 Subject: [PATCH 192/314] New module genomicsdbimport (#857) * saving changes to checkout * saving to sort out other branch * removed yml tracking of files that cant be tracked due to directory name changing between runs * test data added, ready for pr * fix eol linting error * Update modules/gatk4/genomicsdbimport/main.nf Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> * merging with master * update push to show progress * tests now working untar able to pass data to genomicsdbimport * commit to checkout * tests updated, module reworked to simplify and emit updated gendb * Apply suggestions from code review Co-authored-by: Harshil Patel * update meta.yml Priority of input options changed, updated to reflect 
this * Update test.yml name prefix changed in main script, test.yml updated to reflect this * fix tests due to review changes Co-authored-by: GCJMackenzie Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> Co-authored-by: Harshil Patel --- modules/gatk4/genomicsdbimport/functions.nf | 78 ++++++++++++++++++ modules/gatk4/genomicsdbimport/main.nf | 67 ++++++++++++++++ modules/gatk4/genomicsdbimport/meta.yml | 80 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/config/test_data.config | 2 + tests/modules/gatk4/genomicsdbimport/main.nf | 61 ++++++++++++++ tests/modules/gatk4/genomicsdbimport/test.yml | 50 ++++++++++++ 7 files changed, 342 insertions(+) create mode 100644 modules/gatk4/genomicsdbimport/functions.nf create mode 100644 modules/gatk4/genomicsdbimport/main.nf create mode 100644 modules/gatk4/genomicsdbimport/meta.yml create mode 100644 tests/modules/gatk4/genomicsdbimport/main.nf create mode 100644 tests/modules/gatk4/genomicsdbimport/test.yml diff --git a/modules/gatk4/genomicsdbimport/functions.nf b/modules/gatk4/genomicsdbimport/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/genomicsdbimport/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf new file mode 100644 index 00000000..aa4fceb0 --- /dev/null +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -0,0 +1,67 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_GENOMICSDBIMPORT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(vcf), path(tbi), path(intervalfile), val(intervalval), path(wspace) + val run_intlist + val run_updatewspace + val input_map + + output: + tuple val(meta), path("*_genomicsdb") , optional:true, emit: genomicsdb + tuple val(meta), path("$updated_db") , optional:true, emit: updatedb + tuple val(meta), path("*.interval_list"), optional:true, emit: intervallist + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + // settings for running default create gendb mode + def inputs_command = input_map ? "--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V')}" + def dir_command = "--genomicsdb-workspace-path ${prefix}" + def intervals_command = intervalfile ? " -L ${intervalfile} " : " -L ${intervalval} " + + // settings changed for running get intervals list mode if run_intlist is true + if (run_intlist) { + inputs_command = '' + dir_command = "--genomicsdb-update-workspace-path ${wspace}" + intervals_command = "--output-interval-list-to-file ${prefix}.interval_list" + } + + // settings changed for running update gendb mode. inputs_command same as default, update_db forces module to emit the updated gendb + if (run_updatewspace) { + dir_command = "--genomicsdb-update-workspace-path ${wspace}" + intervals_command = '' + updated_db = wspace.toString() + } + + """ + gatk GenomicsDBImport \\ + $inputs_command \\ + $dir_command \\ + $intervals_command \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/genomicsdbimport/meta.yml b/modules/gatk4/genomicsdbimport/meta.yml new file mode 100644 index 00000000..f7a32e7e --- /dev/null +++ b/modules/gatk4/genomicsdbimport/meta.yml @@ -0,0 +1,80 @@ +name: gatk4_genomicsdbimport +description: merge GVCFs from multiple samples. For use in joint genotyping or somatic panel of normal creation. 
+keywords:
+  - gatk4
+  - genomicsdbimport
+  - genomicsdb
+  - panelofnormalscreation
+  - jointgenotyping
+tools:
+  - gatk4:
+      description: |
+        Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools
+        with a primary focus on variant discovery and genotyping. Its powerful processing engine
+        and high-performance computing features make it capable of taking on projects of any size.
+      homepage: https://gatk.broadinstitute.org/hc/en-us
+      documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
+      doi: 10.1158/1538-7445.AM2017-3590
+
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test']
+  - vcf:
+      type: list
+      description: either a list of vcf files to be used to create or update a genomicsdb, or a file that contains a map to vcf files to be used.
+      pattern: "*.vcf.gz"
+
+  - tbi:
+      type: list
+      description: list of tbi files that match with the input vcf files
+      pattern: "*.vcf.gz_tbi"
+
+  - wspace:
+      type: path
+      description: path to an existing genomicsdb to be used in update db mode or get intervals mode. This WILL NOT specify name of a new genomicsdb in create db mode.
+      pattern: "/path/to/existing/gendb"
+
+  - intervalfile:
+      type: file
+      description: file containing the intervals to be used when creating the genomicsdb
+      pattern: "*.interval_list"
+
+  - intervalval:
+      type: string
+      description: if an intervals file has not been specified, the value entered here will be used as an interval via the "-L" argument
+      pattern: "example: chr1:1000-10000"
+
+  - run_intlist:
+      type: boolean
+      description: Specify whether to run get interval list mode, this option cannot be specified at the same time as run_updatewspace.
+      pattern: "true/false"
+
+  - run_updatewspace:
+      type: boolean
+      description: Specify whether to run update genomicsdb mode, this option takes priority over run_intlist.
+      pattern: "true/false"
+
+  - input_map:
+      type: boolean
+      description: Specify whether the vcf input is providing a list of vcf file(s) or a single file containing a map of paths to vcf files to be used to create or update a genomicsdb.
+      pattern: "*.sample_map"
+
+output:
+  - genomicsdb:
+      type: directory
+      description: Directory containing the files that compose the genomicsdb workspace, this is only output for create mode, as update changes an existing db
+      pattern: "*_genomicsdb"
+  - intervallist:
+      type: file
+      description: File containing the intervals used to generate the genomicsdb, only created by get intervals mode.
+ pattern: "*.interval_list" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d7fed0c0..db10b55b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -450,6 +450,10 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** +gatk4/genomicsdbimport: + - modules/gatk4/genomicsdbimport/** + - tests/modules/gatk4/genomicsdbimport/** + gatk4/filtermutectcalls: - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index c05e1c8f..d0489e03 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -103,6 +103,7 @@ params { genome_dict = "${test_data_dir}/genomics/homo_sapiens/genome/genome.dict" genome_gff3 = "${test_data_dir}/genomics/homo_sapiens/genome/genome.gff3" genome_gtf = "${test_data_dir}/genomics/homo_sapiens/genome/genome.gtf" + genome_interval_list = "${test_data_dir}/genomics/homo_sapiens/genome/genome.interval_list" genome_sizes = "${test_data_dir}/genomics/homo_sapiens/genome/genome.sizes" genome_bed = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed" genome_header = "${test_data_dir}/genomics/homo_sapiens/genome/genome.header" @@ -181,6 +182,7 @@ params { test2_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.baserecalibrator.table" test_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test.pileups.table" test2_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.pileups.table" + test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" test_test2_paired_mutect2_calls_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz" test_test2_paired_mutect2_calls_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.tbi" diff --git a/tests/modules/gatk4/genomicsdbimport/main.nf b/tests/modules/gatk4/genomicsdbimport/main.nf new file mode 100644 index 00000000..ef67b04a --- /dev/null +++ b/tests/modules/gatk4/genomicsdbimport/main.nf @@ -0,0 +1,61 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) +include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimport/main.nf' addParams( options: [:] ) + +workflow test_gatk4_genomicsdbimport_create_genomicsdb { + + input = [ [ id:'test_genomicsdb'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true) , + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) , + file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) , + [] , + [] ] + + run_intlist = false + run_updatewspace = false + input_map = false + + GATK4_GENOMICSDBIMPORT ( input, run_intlist, run_updatewspace, input_map ) +} + +workflow test_gatk4_genomicsdbimport_get_intervalslist { + db = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( db ) + + def input = Channel.of([ [ id:'test_genomicsdb'], // meta map + [] , + [] , + [] , + [] ]) + .combine(UNTAR.out.untar) + 
+ run_intlist = true + run_updatewspace = false + input_map = false + + GATK4_GENOMICSDBIMPORT ( input, run_intlist, run_updatewspace, input_map ) +} + +workflow test_gatk4_genomicsdbimport_update_genomicsdb { + db = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( db ) + + def input = Channel.of([ [ id:'test_genomicsdb'], // meta map + file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'] , checkIfExists: true) , + file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz_tbi'] , checkIfExists: true) , + [] , + [] ]) + .combine(UNTAR.out.untar) + + run_intlist = false + run_updatewspace = true + input_map = false + + GATK4_GENOMICSDBIMPORT ( input, run_intlist, run_updatewspace, input_map ) + +} diff --git a/tests/modules/gatk4/genomicsdbimport/test.yml b/tests/modules/gatk4/genomicsdbimport/test.yml new file mode 100644 index 00000000..68f5ae7a --- /dev/null +++ b/tests/modules/gatk4/genomicsdbimport/test.yml @@ -0,0 +1,50 @@ +- name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_create_genomicsdb + command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_create_genomicsdb -c tests/config/nextflow.config + tags: + - gatk4/genomicsdbimport + - gatk4 + files: + - path: output/gatk4/test_genomicsdb/__tiledb_workspace.tdb + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/gatk4/test_genomicsdb/callset.json + md5sum: a7d07d1c86449bbb1091ff29368da07a + - path: output/gatk4/test_genomicsdb/chr22$1$40001/.__consolidation_lock + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/gatk4/test_genomicsdb/chr22$1$40001/__array_schema.tdb + - path: output/gatk4/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json + md5sum: 2502f79658bc000578ebcfddfc1194c0 + - path: output/gatk4/test_genomicsdb/vcfheader.vcf + contains: + - "FORMAT= Date: Fri, 29 Oct 2021 17:45:32 +0100 Subject: [PATCH 193/314] FIX: Createsomaticpanelofnormals add requested test changes (#983) * files created for createsompon, script written, meta written, still needs tests * updated to 2.0.0 method input, however this requires a genomicsDB input now * script finished, meta yaml updated. 
Tests working locally, test yaml made, needs genomicsdb example on nf-core to run repository tests * versions updated, issue with test data not able to download directory * updated tests to include repo-side data * Apply suggestions from code review * Update modules/gatk4/createsomaticpanelofnormals/main.nf * temp commit to allow checkout * updated createsompon tests to use tarred gendb * resolve conflict * Update tests/modules/gatk4/createsomaticpanelofnormals/main.nf Co-authored-by: Harshil Patel Co-authored-by: GCJMackenzie Co-authored-by: Harshil Patel --- .../gatk4/createsomaticpanelofnormals/main.nf | 61 ++----------------- 1 file changed, 5 insertions(+), 56 deletions(-) diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf index 34fc9847..6e5366f5 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -2,67 +2,16 @@ nextflow.enable.dsl = 2 +include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' addParams( options: [suffix:'.pon'] ) workflow test_gatk4_createsomaticpanelofnormals { - maindir = file('test_genomicsdb') - subdir1 = file('test_genomicsdb/chr22$1$40001') - subdir2 = file('test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448') - subdir3 = file('test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir') - subdir2.mkdirs() - subdir3.mkdirs() + db = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/__tiledb_workspace.tdb' , checkIfExists: true).copyTo(maindir) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/vcfheader.vcf' , checkIfExists: true).copyTo(maindir) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/vidmap.json' , checkIfExists: true).copyTo(maindir) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/callset.json' , checkIfExists: true).copyTo(maindir) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/.__consolidation_lock' , checkIfExists: true).copyTo(subdir1) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__array_schema.tdb' , checkIfExists: true).copyTo(subdir1) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json' , checkIfExists: true).copyTo(subdir3) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_meta_2b25a6c2-cb94-4a4a-9005-acb7c595d322.json' , checkIfExists: true).copyTo(subdir3) - file( 
'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/AD.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/AD_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ALT.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ALT_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/BaseQRankSum.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/DB.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/DP.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/DP_FORMAT.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/END.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ExcessHet.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/FILTER.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/FILTER_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/GQ.tdb' , checkIfExists: true).copyTo(subdir2) - file( 
'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/GT.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/GT_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ID.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ID_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/InbreedingCoeff.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MIN_DP.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MLEAC.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MLEAC_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MLEAF.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MLEAF_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/MQRankSum.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PGT.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PGT_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 
'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PID.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PID_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PL.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PL_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/PS.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/QUAL.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/RAW_MQandDP.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/REF.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/REF_var.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/ReadPosRankSum.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/SB.tdb' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/__book_keeping.tdb.gz' , checkIfExists: true).copyTo(subdir2) - file( 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/__coords.tdb' , checkIfExists: true).copyTo(subdir2) - file( 
'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/illumina/gatk/test_genomicsdb/chr22$1$40001/__3cf81648-433d-4464-be08-23d082445c9b139814474716928_1630588248448/__tiledb_fragment.tdb' , checkIfExists: true).copyTo(subdir2) + UNTAR ( db ) - input = [ [ id:'test' ], // meta map - file( maindir , checkIfExists: true)] + input = Channel.of([ id:'test']) + .combine(UNTAR.out.untar) fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) From 9fb26ae46248c33ad858d79a36beb07912c85a89 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Sat, 30 Oct 2021 09:52:13 +0100 Subject: [PATCH 194/314] Add IDR module (#908) * Add IDR module * Add meta and implement main todos * Modifying idr tests * Update tests/config/test_data.config Co-authored-by: Harshil Patel * Update tests/config/test_data.config Co-authored-by: Harshil Patel * Update main.nf * Update tests/config/test_data.config Co-authored-by: Harshil Patel * Update test with new file name Co-authored-by: Jose Espinosa-Carrasco --- modules/idr/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/idr/main.nf | 56 +++++++++++++++++++++++ modules/idr/meta.yml | 53 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 6 +++ tests/modules/idr/main.nf | 35 +++++++++++++++ tests/modules/idr/test.yml | 35 +++++++++++++++ 7 files changed, 267 insertions(+) create mode 100644 modules/idr/functions.nf create mode 100644 modules/idr/main.nf create mode 100644 modules/idr/meta.yml create mode 100644 tests/modules/idr/main.nf create mode 100644 tests/modules/idr/test.yml diff --git a/modules/idr/functions.nf b/modules/idr/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/idr/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + 
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/idr/main.nf b/modules/idr/main.nf new file mode 100644 index 00000000..006826ac --- /dev/null +++ b/modules/idr/main.nf @@ -0,0 +1,56 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process IDR { + tag "$prefix" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "bioconda::idr=2.0.4.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/idr:2.0.4.2--py39hcbe4a3b_5" + } else { + container "quay.io/biocontainers/idr:2.0.4.2--py38h9af456f_5" + } + + input: + path peaks + val peak_type + val prefix + + output: + path "*idrValues.txt", emit: idr + path "*log.txt" , emit: log + path "*.png" , emit: png + path "versions.yml" , emit: versions + + script: + if (peaks.toList().size < 2) { + log.error "[ERROR] idr needs at least two replicates only one provided." + } + def peak_types = ['narrowPeak', 'broadPeak', 'bed'] + if (!peak_types.contains(peak_type)) { + log.error "[ERROR] Invalid option: '${peak_type}'. Valid options for 'peak_type': ${peak_types.join(', ')}." + } + def idr_vals = prefix ? "${prefix}.idrValues.txt" : "idrValues.txt" + def log_file = prefix ? "${prefix}.log.txt" : "log.txt" + """ + idr \\ + --samples $peaks \\ + --input-file-type $peak_type \\ + --output-file $idr_vals \\ + --log-output-file $log_file \\ + --plot \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(idr --version 2>&1) | sed 's/^.*IDR //; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/idr/meta.yml b/modules/idr/meta.yml new file mode 100644 index 00000000..c89e72a4 --- /dev/null +++ b/modules/idr/meta.yml @@ -0,0 +1,53 @@ +name: idr +description: | + Measures reproducibility of ChIP-seq, ATAC-seq peaks using IDR (Irreproducible + Discovery Rate) +keywords: + - IDR + - peaks + - ChIP-seq + - ATAC-seq +tools: + - idr: + description: | + The IDR (Irreproducible Discovery Rate) framework is a unified approach + to measure the reproducibility of findings identified from replicate + experiments and provide highly stable thresholds based on reproducibility. 
+ homepage: None + documentation: None + tool_dev_url: https://github.com/kundajelab/idr + doi: "" + licence: ['GPL v2'] +input: + - peaks: + type: tuple of two files + description: BED, narrowPeak or broadPeak files of replicates + pattern: "*" + - peak_type: + type: value + description: Type of peak file + pattern: "{narrowPeak,broadPeak,bed}" + - prefix: + type: value + description: Prefix for output files +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - idr: + type: file + description: Text file containing IDR values + pattern: "*.{txt}" + - log: + type: file + description: Log file + pattern: "*.{txt}" + - png: + type: file + description: Plot generated by idr + pattern: "*{.png}" + +authors: + - "@drpatelh" + - "@joseespinosa" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index db10b55b..ea999b6a 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -597,6 +597,10 @@ homer/makeucscfile: - modules/homer/makeucscfile/** - tests/modules/homer/makeucscfile/** +idr: + - modules/idr/** + - tests/modules/idr/** + iqtree: - modules/iqtree/** - tests/modules/iqtree/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index d0489e03..12252542 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -202,6 +202,12 @@ params { test2_genome_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gvcf/test2.genome.vcf.gz.tbi" test2_genome_vcf_idx = "${test_data_dir}/genomics/homo_sapiens/illumina/gvcf/test2.genome.vcf.idx" + test_broadpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/broadpeak/test.broadPeak" + test2_broadpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/broadpeak/test2.broadPeak" + + test_narrowpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/narrowpeak/test.narrowPeak" + test2_narrowpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/narrowpeak/test2.narrowPeak" + test_10x_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_1.fastq.gz" test_10x_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_2.fastq.gz" diff --git a/tests/modules/idr/main.nf b/tests/modules/idr/main.nf new file mode 100644 index 00000000..aa141a57 --- /dev/null +++ b/tests/modules/idr/main.nf @@ -0,0 +1,35 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { IDR } from '../../../modules/idr/main.nf' addParams( options: [:] ) + +workflow test_idr_narrowpeak { + + input = [ + file(params.test_data['homo_sapiens']['illumina']['test_narrowpeak'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_narrowpeak'], checkIfExists: true) + ] + + IDR ( input, 'narrowPeak', 'test' ) +} + +workflow test_idr_broadpeak { + + input = [ + file(params.test_data['homo_sapiens']['illumina']['test_broadpeak'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_broadpeak'], checkIfExists: true) + ] + + IDR ( input, 'broadPeak', 'test' ) +} + +workflow test_idr_noprefix { + + input = [ + file(params.test_data['homo_sapiens']['illumina']['test_narrowpeak'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_narrowpeak'], checkIfExists: true) + ] + + IDR ( input, 'narrowPeak', '' ) +} diff --git a/tests/modules/idr/test.yml b/tests/modules/idr/test.yml new file mode 100644 index 00000000..35ee4bc9 --- /dev/null +++ b/tests/modules/idr/test.yml @@ -0,0 +1,35 @@ +- 
name: idr test_idr_narrowpeak + command: nextflow run tests/modules/idr -entry test_idr_narrowpeak -c tests/config/nextflow.config + tags: + - idr + files: + - path: output/idr/test.idrValues.txt + md5sum: 09be837cc6abbc3eb5958b74802eea55 + - path: output/idr/test.idrValues.txt.png + md5sum: 4a7143ccc0ccadb37c2317bf626e6d96 + - path: output/idr/test.log.txt + md5sum: 6443507ac66b9d3b64bc56b78328083e + +- name: idr test_idr_broadpeak + command: nextflow run tests/modules/idr -entry test_idr_broadpeak -c tests/config/nextflow.config + tags: + - idr + files: + - path: output/idr/test.idrValues.txt + md5sum: 387441c716815e4caec3e70a2cc11a4a + - path: output/idr/test.idrValues.txt.png + md5sum: 7204083ca5b920b4215a5991c12cb4e7 + - path: output/idr/test.log.txt + md5sum: e6917133112b5cec135c182ffac19237 + +- name: idr test_idr_noprefix + command: nextflow run tests/modules/idr -entry test_idr_noprefix -c tests/config/nextflow.config + tags: + - idr + files: + - path: output/idr/idrValues.txt + md5sum: 09be837cc6abbc3eb5958b74802eea55 + - path: output/idr/idrValues.txt.png + md5sum: 4a7143ccc0ccadb37c2317bf626e6d96 + - path: output/idr/log.txt + md5sum: 6443507ac66b9d3b64bc56b78328083e From 81d65d4978a988b20ac21abf6a99c1baf936be65 Mon Sep 17 00:00:00 2001 From: Simon Pearce <24893913+SPPearce@users.noreply.github.com> Date: Mon, 1 Nov 2021 12:12:14 +0000 Subject: [PATCH 195/314] Samtools fixmate module (#991) * Samtools fixmate module * Update modules/samtools/fixmate/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/samtools/fixmate/main.nf Co-authored-by: James A. Fellows Yates * Update main.nf * Update modules/samtools/fixmate/meta.yml Co-authored-by: James A. Fellows Yates * Update meta.yml Co-authored-by: Simon Pearce Co-authored-by: James A. 
Fellows Yates --- modules/samtools/fixmate/functions.nf | 78 +++++++++++++++++++++++++ modules/samtools/fixmate/main.nf | 45 ++++++++++++++ modules/samtools/fixmate/meta.yml | 49 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/samtools/fixmate/main.nf | 14 +++++ tests/modules/samtools/fixmate/test.yml | 8 +++ 6 files changed, 198 insertions(+) create mode 100644 modules/samtools/fixmate/functions.nf create mode 100644 modules/samtools/fixmate/main.nf create mode 100644 modules/samtools/fixmate/meta.yml create mode 100644 tests/modules/samtools/fixmate/main.nf create mode 100644 tests/modules/samtools/fixmate/test.yml diff --git a/modules/samtools/fixmate/functions.nf b/modules/samtools/fixmate/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/samtools/fixmate/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf new file mode 100644 index 00000000..e1a766a1 --- /dev/null +++ b/modules/samtools/fixmate/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SAMTOOLS_FIXMATE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" + } else { + container "quay.io/biocontainers/samtools:1.14--hb421002_0" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + if ("$bam" == "${prefix}.bam") error "Input and output names are the same, use the suffix option to disambiguate!" + + """ + samtools \\ + fixmate \\ + $options.args \\ + -@ $task.cpus \\ + $bam \\ + ${prefix}.bam \\ + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/samtools/fixmate/meta.yml b/modules/samtools/fixmate/meta.yml new file mode 100644 index 00000000..2cec6e7c --- /dev/null +++ b/modules/samtools/fixmate/meta.yml @@ -0,0 +1,49 @@ +name: samtools_fixmate +description: Samtools fixmate is a tool that can fill in information (insert size, cigar, mapq) about paired end reads onto the corresponding other read. Also has options to remove secondary/unmapped alignments and recalculate whether reads are proper pairs. +keywords: + - fixmate + - samtools + - insert size + - repair + - bam + - paired + - read pairs +tools: + - samtools: + description: | + SAMtools is a set of utilities for interacting with and post-processing + short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. + These files are generated as output by short read aligners like BWA. + homepage: http://www.htslib.org/ + documentation: http://www.htslib.org/doc/samtools.html + tool_dev_url: https://github.com/samtools/samtools + doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file, must be sorted by name, not coordinate + pattern: "*.{bam,cram,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: A BAM/CRAM/SAM file with mate information added and/or proper pairs recalled + pattern: "*.{bam,cram,sam}" + +authors: + - "@sppearce" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index ea999b6a..6ec2d506 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1027,6 +1027,10 @@ samtools/fastq: - modules/samtools/fastq/** - tests/modules/samtools/fastq/** +samtools/fixmate: + - modules/samtools/fixmate/** + - tests/modules/samtools/fixmate/** + samtools/flagstat: - modules/samtools/flagstat/** - tests/modules/samtools/flagstat/** diff --git a/tests/modules/samtools/fixmate/main.nf b/tests/modules/samtools/fixmate/main.nf new file mode 100644 index 00000000..5174beab --- /dev/null +++ b/tests/modules/samtools/fixmate/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMTOOLS_FIXMATE } from '../../../../modules/samtools/fixmate/main.nf' addParams( options: [args:'-r -c -m'] ) + +workflow test_samtools_fixmate { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + + SAMTOOLS_FIXMATE ( input ) + +} diff --git a/tests/modules/samtools/fixmate/test.yml b/tests/modules/samtools/fixmate/test.yml new file mode 100644 index 00000000..c7864c04 --- /dev/null +++ b/tests/modules/samtools/fixmate/test.yml @@ -0,0 +1,8 @@ +- name: samtools fixmate test_samtools_fixmate + command: nextflow run tests/modules/samtools/fixmate -entry test_samtools_fixmate -c tests/config/nextflow.config + tags: + - samtools/fixmate + - samtools + files: + - path: output/samtools/test.bam + md5sum: 92c8463710cdcaef2010aa02ed9e01fd From cac6dc83bb09d7f33640692fa37d4cb9087abf00 Mon Sep 17 00:00:00 2001 From: Simon Pearce <24893913+SPPearce@users.noreply.github.com> Date: Tue, 2 Nov 2021 09:59:25 +0000 Subject: [PATCH 196/314] gccounter module for hmmcopy (#1003) * hmmcopy gccounter working * Update modules/hmmcopy/gccounter/main.nf Co-authored-by: Chris Cheshire * Update main.nf Changed version to 0.1.1 as the container says Co-authored-by: Simon Pearce Co-authored-by: Chris Cheshire --- modules/hmmcopy/gccounter/functions.nf | 78 ++++++++++++++++++++++++ modules/hmmcopy/gccounter/main.nf | 40 ++++++++++++ modules/hmmcopy/gccounter/meta.yml | 33 ++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/hmmcopy/gccounter/main.nf | 11 ++++ tests/modules/hmmcopy/gccounter/test.yml | 8 +++ 6 files changed, 174 insertions(+) create mode 100644 modules/hmmcopy/gccounter/functions.nf create mode 100644 modules/hmmcopy/gccounter/main.nf create mode 100644 modules/hmmcopy/gccounter/meta.yml create mode 100644 tests/modules/hmmcopy/gccounter/main.nf create mode 100644 tests/modules/hmmcopy/gccounter/test.yml diff --git a/modules/hmmcopy/gccounter/functions.nf b/modules/hmmcopy/gccounter/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/hmmcopy/gccounter/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def 
getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/hmmcopy/gccounter/main.nf b/modules/hmmcopy/gccounter/main.nf new file mode 100644 index 00000000..6e7bc11f --- /dev/null +++ b/modules/hmmcopy/gccounter/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '0.1.1' + +process HMMCOPY_GCCOUNTER { + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? 
"bioconda::hmmcopy=0.1.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5" + } else { + container "quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5" + } + + input: + path fasta + + output: + path "*.gc.wig" , emit: wig + path "versions.yml", emit: versions + + script: + """ + gcCounter \\ + $options.args \\ + ${fasta} > ${fasta.baseName}.gc.wig + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS + """ +} diff --git a/modules/hmmcopy/gccounter/meta.yml b/modules/hmmcopy/gccounter/meta.yml new file mode 100644 index 00000000..71727af2 --- /dev/null +++ b/modules/hmmcopy/gccounter/meta.yml @@ -0,0 +1,33 @@ +name: hmmcopy_gccounter +description: gcCounter function from HMMcopy utilities, used to generate GC content in non-overlapping windows from a fasta reference +keywords: + - hmmcopy + - gccounter + - cnv +tools: + - hmmcopy: + description: C++ based programs for analyzing BAM files and preparing read counts -- used with bioconductor-hmmcopy + homepage: https://github.com/shahcompbio/hmmcopy_utils + documentation: https://github.com/shahcompbio/hmmcopy_utils + tool_dev_url: https://github.com/shahcompbio/hmmcopy_utils + doi: "" + licence: ['GPL v3'] + +input: + - fasta: + type: file + description: Input genome fasta file + + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - wig: + type: file + description: wig file containing gc content of each window of the genome + pattern: "*.{gc.wig}" + +authors: + - "@sppearce" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6ec2d506..4e4526b1 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -577,6 +577,10 @@ hisat2/extractsplicesites: - modules/hisat2/extractsplicesites/** - tests/modules/hisat2/extractsplicesites/** +hmmcopy/gccounter: + - modules/hmmcopy/gccounter/** + - tests/modules/hmmcopy/gccounter/** + hmmer/hmmalign: - modules/hmmer/hmmalign/** - tests/modules/hmmer/hmmalign/** diff --git a/tests/modules/hmmcopy/gccounter/main.nf b/tests/modules/hmmcopy/gccounter/main.nf new file mode 100644 index 00000000..30846ca9 --- /dev/null +++ b/tests/modules/hmmcopy/gccounter/main.nf @@ -0,0 +1,11 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HMMCOPY_GCCOUNTER } from '../../../../modules/hmmcopy/gccounter/main.nf' addParams( options: [:] ) + +workflow test_hmmcopy_gccounter { + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + HMMCOPY_GCCOUNTER (fasta) +} diff --git a/tests/modules/hmmcopy/gccounter/test.yml b/tests/modules/hmmcopy/gccounter/test.yml new file mode 100644 index 00000000..edcd6b92 --- /dev/null +++ b/tests/modules/hmmcopy/gccounter/test.yml @@ -0,0 +1,8 @@ +- name: hmmcopy gccounter test_hmmcopy_gccounter + command: nextflow run tests/modules/hmmcopy/gccounter -entry test_hmmcopy_gccounter -c tests/config/nextflow.config + tags: + - hmmcopy + - hmmcopy/gccounter + files: + - path: output/hmmcopy/genome.gc.wig + md5sum: 59ad14bc5aaa903187d7b248c9490deb From 374d81e0b39d24ec2ef84b6f931c74ded5e3e682 Mon Sep 17 00:00:00 2001 From: Simon Pearce <24893913+SPPearce@users.noreply.github.com> Date: Tue, 2 Nov 2021 11:01:13 +0000 Subject: [PATCH 197/314] readcounter module for hmmcopy (#1001) * readcounter module for 
hmmcopy * Changed version number * Fix indentation * Update main.nf * Update modules/hmmcopy/readcounter/main.nf Co-authored-by: Chris Cheshire Co-authored-by: Simon Pearce Co-authored-by: Chris Cheshire --- modules/hmmcopy/readcounter/functions.nf | 78 ++++++++++++++++++++++ modules/hmmcopy/readcounter/main.nf | 42 ++++++++++++ modules/hmmcopy/readcounter/meta.yml | 43 ++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/hmmcopy/readcounter/main.nf | 14 ++++ tests/modules/hmmcopy/readcounter/test.yml | 8 +++ 6 files changed, 189 insertions(+) create mode 100644 modules/hmmcopy/readcounter/functions.nf create mode 100644 modules/hmmcopy/readcounter/main.nf create mode 100644 modules/hmmcopy/readcounter/meta.yml create mode 100644 tests/modules/hmmcopy/readcounter/main.nf create mode 100644 tests/modules/hmmcopy/readcounter/test.yml diff --git a/modules/hmmcopy/readcounter/functions.nf b/modules/hmmcopy/readcounter/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/hmmcopy/readcounter/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/hmmcopy/readcounter/main.nf b/modules/hmmcopy/readcounter/main.nf new file mode 100644 index 00000000..9e3e72a7 --- /dev/null +++ b/modules/hmmcopy/readcounter/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '0.1.1' + +process HMMCOPY_READCOUNTER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5" + } else { + container "quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5" + } + + input: + tuple val(meta), path(bam), path(bai) + + output: + tuple val(meta), path("*.wig"), emit: wig + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + readCounter \\ + $options.args \\ + ${bam} > ${prefix}.wig + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS + """ +} diff --git a/modules/hmmcopy/readcounter/meta.yml b/modules/hmmcopy/readcounter/meta.yml new file mode 100644 index 00000000..9b09a55c --- /dev/null +++ b/modules/hmmcopy/readcounter/meta.yml @@ -0,0 +1,43 @@ +name: hmmcopy_readcounter +description: readCounter function from HMMcopy utilities, used to generate read in windows +keywords: + - hmmcopy + - readcounter + - cnv +tools: + - hmmcopy: + description: C++ based programs for analyzing BAM files and preparing read counts -- used with bioconductor-hmmcopy + homepage: https://github.com/shahcompbio/hmmcopy_utils + documentation: https://github.com/shahcompbio/hmmcopy_utils + tool_dev_url: https://github.com/shahcompbio/hmmcopy_utils + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - wig: + type: file + description: A wig file with the number of reads lying within each window in each chromosome + pattern: "*.wig" + +authors: + - "@sppearce" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4e4526b1..9320245f 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -581,6 +581,10 @@ hmmcopy/gccounter: - modules/hmmcopy/gccounter/** - tests/modules/hmmcopy/gccounter/** +hmmcopy/readcounter: + - modules/hmmcopy/readcounter/** + - tests/modules/hmmcopy/readcounter/** + hmmer/hmmalign: - modules/hmmer/hmmalign/** - tests/modules/hmmer/hmmalign/** diff --git a/tests/modules/hmmcopy/readcounter/main.nf b/tests/modules/hmmcopy/readcounter/main.nf new file mode 100644 index 00000000..9025f98e --- /dev/null +++ b/tests/modules/hmmcopy/readcounter/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HMMCOPY_READCOUNTER } from '../../../../modules/hmmcopy/readcounter/main.nf' addParams( options: [:] ) + +workflow test_hmmcopy_readcounter { + + input = [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)] + ] + HMMCOPY_READCOUNTER ( input ) +} diff --git a/tests/modules/hmmcopy/readcounter/test.yml b/tests/modules/hmmcopy/readcounter/test.yml new file mode 100644 index 00000000..6c00ee08 --- /dev/null +++ b/tests/modules/hmmcopy/readcounter/test.yml @@ -0,0 +1,8 @@ +- name: hmmcopy readcounter test_hmmcopy_readcounter + command: nextflow run tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c tests/config/nextflow.config + tags: + - hmmcopy + - hmmcopy/readcounter + files: + - path: output/hmmcopy/test.wig + md5sum: 3655d8325baea81b3b690791262c6b57 From 4619d012e562a224b69d9c09d51e12e4394f3238 Mon Sep 17 00:00:00 2001 From: fbdtemme <69114541+fbdtemme@users.noreply.github.com> Date: Tue, 2 Nov 2021 17:00:25 +0100 Subject: [PATCH 198/314] Add cram support to Allelecounter module (#1013) * Add CRAM support to allelecounter * Update meta.yml * Rename bam,bai to input,input_index * Apply suggestions from code review * Fix reference to renamed variable Co-authored-by: Maxime U. Garcia --- modules/allelecounter/main.nf | 8 ++++++-- modules/allelecounter/meta.yml | 9 ++++++--- tests/modules/allelecounter/main.nf | 16 ++++++++++++++-- tests/modules/allelecounter/test.yml | 12 ++++++++++-- 4 files changed, 36 insertions(+), 9 deletions(-) diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index 5184df7d..8d986579 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -19,8 +19,9 @@ process ALLELECOUNTER { } input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(input), path(input_index) path loci + path fasta output: tuple val(meta), path("*.alleleCount"), emit: allelecount @@ -28,11 +29,14 @@ process ALLELECOUNTER { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def reference_options = fasta ? 
"-r $fasta": "" + """ alleleCounter \\ $options.args \\ -l $loci \\ - -b $bam \\ + -b $input \\ + $reference_options \\ -o ${prefix}.alleleCount cat <<-END_VERSIONS > versions.yml diff --git a/modules/allelecounter/meta.yml b/modules/allelecounter/meta.yml index a15f3eac..7d921e12 100644 --- a/modules/allelecounter/meta.yml +++ b/modules/allelecounter/meta.yml @@ -19,11 +19,11 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - bam: + - input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - bai: + - input_index: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" @@ -31,7 +31,9 @@ input: type: file description: loci file pattern: "*.{tsv}" - + - fasta: + type: file + description: Input genome fasta file. Required when passing CRAM files. output: - meta: @@ -50,3 +52,4 @@ output: authors: - "@fullama" + - "@fbdtemme" diff --git a/tests/modules/allelecounter/main.nf b/tests/modules/allelecounter/main.nf index 542529c2..b938ab94 100644 --- a/tests/modules/allelecounter/main.nf +++ b/tests/modules/allelecounter/main.nf @@ -3,12 +3,24 @@ nextflow.enable.dsl = 2 include { ALLELECOUNTER } from '../../../modules/allelecounter/main.nf' addParams( options: [:] ) -workflow test_allelecounter { +workflow test_allelecounter_bam { input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] positions = [ file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) ] - ALLELECOUNTER ( input, positions ) + ALLELECOUNTER ( input, positions, [] ) +} + + +workflow test_allelecounter_cram { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) + ] + positions = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ] + fasta = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ] + + ALLELECOUNTER ( input, positions, fasta ) } diff --git a/tests/modules/allelecounter/test.yml b/tests/modules/allelecounter/test.yml index 7ed71559..bbef0ecc 100644 --- a/tests/modules/allelecounter/test.yml +++ b/tests/modules/allelecounter/test.yml @@ -1,7 +1,15 @@ -- name: allelecounter test_allelecounter - command: nextflow run tests/modules/allelecounter -entry test_allelecounter -c tests/config/nextflow.config +- name: allelecounter test_allelecounter_bam + command: nextflow run tests/modules/allelecounter -entry test_allelecounter_bam -c tests/config/nextflow.config tags: - allelecounter files: - path: output/allelecounter/test.alleleCount md5sum: 2bbe9d7331b78bdac30fe30dbc5fdaf3 + +- name: allelecounter test_allelecounter_cram + command: nextflow run tests/modules/allelecounter -entry test_allelecounter_cram -c tests/config/nextflow.config + tags: + - allelecounter + files: + - path: output/allelecounter/test.alleleCount + md5sum: 2f83352a185168c7c98e9e42550b2856 From 9767b081b9d808d7b6671d34d48c70afb436a5f1 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 3 Nov 2021 11:22:06 +0100 Subject: [PATCH 199/314] Merge freebayes subtools (#1015) * feat: merge freebayes subtools * fix: typo * assess comments from review * fix: path to module --- modules/freebayes/{germline => }/functions.nf | 0 modules/freebayes/{germline => }/main.nf | 7 +- modules/freebayes/{germline => }/meta.yml | 9 +- modules/freebayes/somatic/functions.nf | 78 --------------- modules/freebayes/somatic/main.nf | 74 --------------- modules/freebayes/somatic/meta.yml | 66 ------------- tests/config/pytest_modules.yml | 10 +- tests/modules/freebayes/germline/main.nf | 51 ---------- tests/modules/freebayes/germline/test.yml | 26 ----- tests/modules/freebayes/main.nf | 95 +++++++++++++++++++ tests/modules/freebayes/somatic/main.nf | 37 -------- tests/modules/freebayes/somatic/test.yml | 17 ---- tests/modules/freebayes/test.yml | 39 ++++++++ 13 files changed, 148 insertions(+), 361 deletions(-) rename modules/freebayes/{germline => }/functions.nf (100%) rename modules/freebayes/{germline => }/main.nf (91%) rename modules/freebayes/{germline => }/meta.yml (94%) delete mode 100644 modules/freebayes/somatic/functions.nf delete mode 100644 modules/freebayes/somatic/main.nf delete mode 100644 modules/freebayes/somatic/meta.yml delete mode 100644 tests/modules/freebayes/germline/main.nf delete mode 100644 tests/modules/freebayes/germline/test.yml create mode 100644 tests/modules/freebayes/main.nf delete mode 100644 tests/modules/freebayes/somatic/main.nf delete mode 100644 tests/modules/freebayes/somatic/test.yml create mode 100644 tests/modules/freebayes/test.yml diff --git a/modules/freebayes/germline/functions.nf b/modules/freebayes/functions.nf similarity index 100% rename from modules/freebayes/germline/functions.nf rename to modules/freebayes/functions.nf diff --git a/modules/freebayes/germline/main.nf b/modules/freebayes/main.nf similarity index 91% rename from modules/freebayes/germline/main.nf rename to modules/freebayes/main.nf index eae62036..0b23dc40 100644 --- a/modules/freebayes/germline/main.nf +++ b/modules/freebayes/main.nf @@ -4,7 +4,7 @@ include { initOptions; saveFiles; getProcessName; getSoftwareName } from './func params.options = [:] options = initOptions(params.options) -process FREEBAYES_GERMLINE { +process FREEBAYES { tag "$meta.id" label 'process_low' publishDir "${params.outdir}", @@ -19,7 +19,7 @@ process FREEBAYES_GERMLINE { } input: - tuple val(meta), path(input), path(input_index) + tuple val(meta), path(input_1), path(input_1_index), path(input_2), path(input_2_index) path fasta path fai path targets @@ -33,6 +33,7 @@ process FREEBAYES_GERMLINE { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def input = input_2 ? "${input_1} ${input_2}" : "${input_1}" def targets_file = targets ? "--target ${targets}" : "" def samples_file = samples ? "--samples ${samples}" : "" def populations_file = populations ? 
"--populations ${populations}" : "" @@ -48,7 +49,7 @@ process FREEBAYES_GERMLINE { $populations_file \\ $cnv_file \\ $options.args \\ - $input > ${prefix}.vcf + $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf diff --git a/modules/freebayes/germline/meta.yml b/modules/freebayes/meta.yml similarity index 94% rename from modules/freebayes/germline/meta.yml rename to modules/freebayes/meta.yml index 86650715..75d44826 100644 --- a/modules/freebayes/germline/meta.yml +++ b/modules/freebayes/meta.yml @@ -1,11 +1,14 @@ -name: freebayes_germline +name: freebayes description: A haplotype-based variant detector keywords: - variant caller - SNP - genotyping - - variant calling + - somatic variant calling + - germline variant calling + - bacterial variant calling - bayesian + tools: - freebayes: description: Bayesian haplotype-based polymorphism discovery and genotyping @@ -73,6 +76,8 @@ output: type: file description: Compressed VCF file pattern: "*.vcf.gz" + authors: - "@maxibor" - "@FriederikeHanssen" + - "@maxulysse" diff --git a/modules/freebayes/somatic/functions.nf b/modules/freebayes/somatic/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/freebayes/somatic/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/freebayes/somatic/main.nf b/modules/freebayes/somatic/main.nf deleted file mode 100644 index c1579661..00000000 --- a/modules/freebayes/somatic/main.nf +++ /dev/null @@ -1,74 +0,0 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -process FREEBAYES_SOMATIC { - tag "$meta.id" - label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - - conda (params.enable_conda ? "bioconda::freebayes=1.3.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3" - } else { - container "quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3" - } - - input: - tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor) - path fasta - path fai - path targets - path samples - - output: - tuple val(meta), path("*.vcf.gz") , emit: vcf - path "versions.yml" , emit: versions - - script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def targets_file = targets ? "--target ${targets}" : "" - def samples_file = samples ? "--samples ${samples}" : "" - - if (task.cpus > 1) { - """ - freebayes-parallel \\ - <(fasta_generate_regions.py ${fasta}.fai 10000) ${task.cpus} \\ - -f $fasta \\ - $targets_file \\ - $samples_file \\ - $options.args \\ - $input_tumor \\ - $input_normal > ${prefix}.vcf - - gzip --no-name ${prefix}.vcf - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) - END_VERSIONS - """ - - } else { - """ - freebayes \\ - -f $fasta \\ - $targets_file \\ - $samples_file \\ - $options.args \\ - $input_tumor \\ - $input_normal > ${prefix}.vcf - - gzip --no-name ${prefix}.vcf - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) - END_VERSIONS - """ - } -} diff --git a/modules/freebayes/somatic/meta.yml b/modules/freebayes/somatic/meta.yml deleted file mode 100644 index 391e5007..00000000 --- a/modules/freebayes/somatic/meta.yml +++ /dev/null @@ -1,66 +0,0 @@ -name: freebayes_somatic -description: A haplotype-based variant detector -keywords: - - variant caller - - SNP - - genotyping - - somatic variant calling - - bayesian - -tools: - - freebayes: - description: Bayesian haplotype-based polymorphism discovery and genotyping - homepage: https://github.com/freebayes/freebayes - documentation: https://github.com/freebayes/freebayes - tool_dev_url: https://github.com/freebayes/freebayes - doi: "arXiv:1207.3907" - licence: ['MIT'] - -input: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. 
[ id:'test', single_end:false ] - - input: - type: file - description: BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" - - input_index: - type: file - description: BAM/CRAM/SAM index file - pattern: "*.bam.bai" - - fasta: - type: file - description: reference fasta file - pattern: ".{fa,fa.gz,fasta,fasta.gz}" - - fai: - type: file - description: reference fasta file index - pattern: "*.fai" - - targets: - type: file - description: Optional - Limit analysis to targets listed in this BED-format FILE. - pattern: "*.bed" - - samples: - type: file - description: Optional - Limit analysis to samples listed (one per line) in the FILE. - pattern: "*.txt" - -output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - version: - type: file - description: File containing software version - pattern: "*.{version.txt}" - - vcf: - type: file - description: Compressed VCF file - pattern: "*.vcf.gz" - -authors: - - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 9320245f..047f83ff 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -410,13 +410,9 @@ flash: - modules/flash/** - tests/modules/flash/** -freebayes/germline: - - modules/freebayes/germline/** - - tests/modules/freebayes/germline/** - -freebayes/somatic: - - modules/freebayes/somatic/** - - tests/modules/freebayes/somatic/** +freebayes: + - modules/freebayes/** + - tests/modules/freebayes/** gatk4/applybqsr: - modules/gatk4/applybqsr/** diff --git a/tests/modules/freebayes/germline/main.nf b/tests/modules/freebayes/germline/main.nf deleted file mode 100644 index 1b39eea0..00000000 --- a/tests/modules/freebayes/germline/main.nf +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { FREEBAYES_GERMLINE } from '../../../../modules/freebayes/germline/main.nf' addParams( options: [:] ) - -workflow test_freebayes { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) - targets = [] - samples = [] - populations = [] - cnv = [] - - FREEBAYES_GERMLINE ( input, fasta, fai, targets, samples, populations, cnv) -} - -workflow test_freebayes_bed { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) - targets = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) - samples = [] - populations = [] - cnv = [] - - FREEBAYES_GERMLINE ( input, fasta, fai, targets, samples, populations, cnv) -} - -workflow test_freebayes_cram { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), - 
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - targets = [] - samples = [] - populations = [] - cnv = [] - - FREEBAYES_GERMLINE ( input, fasta, fai, targets, samples, populations, cnv) -} diff --git a/tests/modules/freebayes/germline/test.yml b/tests/modules/freebayes/germline/test.yml deleted file mode 100644 index 55925b92..00000000 --- a/tests/modules/freebayes/germline/test.yml +++ /dev/null @@ -1,26 +0,0 @@ -- name: freebayes germline test_freebayes - command: nextflow run tests/modules/freebayes/germline -entry test_freebayes -c tests/config/nextflow.config - tags: - - freebayes - - freebayes/germline - files: - - path: output/freebayes/test.vcf.gz - md5sum: 1ec210ad27514c7a4140c924dc66d979 - -- name: freebayes germline test_freebayes_bed - command: nextflow run tests/modules/freebayes/germline -entry test_freebayes_bed -c tests/config/nextflow.config - tags: - - freebayes - - freebayes/germline - files: - - path: output/freebayes/test.vcf.gz - md5sum: e8923cccd5dac196f72d3d3997a60706 - -- name: freebayes germline test_freebayes_cram - command: nextflow run tests/modules/freebayes/germline -entry test_freebayes_cram -c tests/config/nextflow.config - tags: - - freebayes - - freebayes/germline - files: - - path: output/freebayes/test.vcf.gz - md5sum: cb57a3ed154618e3aa4a5272fcfb7521 diff --git a/tests/modules/freebayes/main.nf b/tests/modules/freebayes/main.nf new file mode 100644 index 00000000..c6f5641f --- /dev/null +++ b/tests/modules/freebayes/main.nf @@ -0,0 +1,95 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FREEBAYES } from '../../../modules/freebayes/main.nf' addParams( options: [:] ) + +workflow test_freebayes { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [], + [] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = [] + samples = [] + populations = [] + cnv = [] + + FREEBAYES (input, fasta, fai, targets, samples, populations, cnv) +} + +workflow test_freebayes_bed { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [], + [] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + samples = [] + populations = [] + cnv = [] + + FREEBAYES (input, fasta, fai, targets, samples, populations, cnv) +} + +workflow test_freebayes_cram { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + 
[], + [] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = [] + samples = [] + populations = [] + cnv = [] + + FREEBAYES (input, fasta, fai, targets, samples, populations, cnv) +} + +workflow test_freebayes_somatic { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam_bai'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = [] + samples = [] + populations = [] + cnv = [] + + FREEBAYES (input, fasta, fai, targets, samples, populations, cnv) +} + +workflow test_freebayes_somatic_cram_intervals { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + targets = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + samples = [] + populations = [] + cnv = [] + + FREEBAYES (input, fasta, fai, targets, samples, populations, cnv) +} diff --git a/tests/modules/freebayes/somatic/main.nf b/tests/modules/freebayes/somatic/main.nf deleted file mode 100644 index d26caf34..00000000 --- a/tests/modules/freebayes/somatic/main.nf +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { FREEBAYES_SOMATIC } from '../../../../modules/freebayes/somatic/main.nf' addParams( options: [:] ) - -workflow test_freebayes { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam_bai'], checkIfExists: true) - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - targets = [] - samples = [] - - FREEBAYES_SOMATIC ( input, fasta, fai, targets, samples) -} - -workflow test_freebayes_intervals { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), - 
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true) - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - targets = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) - samples = [] - - FREEBAYES_SOMATIC ( input, fasta, fai, targets, samples) -} diff --git a/tests/modules/freebayes/somatic/test.yml b/tests/modules/freebayes/somatic/test.yml deleted file mode 100644 index 93113e60..00000000 --- a/tests/modules/freebayes/somatic/test.yml +++ /dev/null @@ -1,17 +0,0 @@ -- name: freebayes somatic test_freebayes - command: nextflow run tests/modules/freebayes/somatic -entry test_freebayes -c tests/config/nextflow.config - tags: - - freebayes/somatic - - freebayes - files: - - path: output/freebayes/test.vcf.gz - md5sum: 1c47d02f27ec5918558c8688ce6e7780 - -- name: freebayes somatic test_freebayes_intervals - command: nextflow run tests/modules/freebayes/somatic -entry test_freebayes_intervals -c tests/config/nextflow.config - tags: - - freebayes/somatic - - freebayes - files: - - path: output/freebayes/test.vcf.gz - md5sum: 5b8a12666bde63746dcec7afcd3ef789 diff --git a/tests/modules/freebayes/test.yml b/tests/modules/freebayes/test.yml new file mode 100644 index 00000000..a5840609 --- /dev/null +++ b/tests/modules/freebayes/test.yml @@ -0,0 +1,39 @@ +- name: freebayes test_freebayes + command: nextflow run tests/modules/freebayes -entry test_freebayes -c tests/config/nextflow.config + tags: + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: 04d60a7135768777e0c764daec6519db + +- name: freebayes test_freebayes_bed + command: nextflow run tests/modules/freebayes -entry test_freebayes_bed -c tests/config/nextflow.config + tags: + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: fc6e228c8ac5508bd83da45eafc2e7b2 + +- name: freebayes test_freebayes_cram + command: nextflow run tests/modules/freebayes -entry test_freebayes_cram -c tests/config/nextflow.config + tags: + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: 91b8caaa0e396e1ba4f264a83bb67254 + +- name: freebayes test_freebayes_somatic + command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic -c tests/config/nextflow.config + tags: + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: 40da977199f16d9888e0e0e07e8bebee + +- name: freebayes test_freebayes_somatic_cram_intervals + command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic_cram_intervals -c tests/config/nextflow.config + tags: + - freebayes + files: + - path: output/freebayes/test.vcf.gz + md5sum: dd976880365287d9ad31a606eb4d091f From 11226d9d98a59af9519a871f28df183391c0d300 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 3 Nov 2021 11:41:12 +0100 Subject: [PATCH 200/314] fix remove md5sum check (#1017) --- tests/modules/freebayes/test.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/modules/freebayes/test.yml b/tests/modules/freebayes/test.yml index a5840609..22fd0e88 100644 --- a/tests/modules/freebayes/test.yml +++ b/tests/modules/freebayes/test.yml @@ -4,7 +4,6 @@ - freebayes files: - path: output/freebayes/test.vcf.gz - md5sum: 04d60a7135768777e0c764daec6519db - name: freebayes test_freebayes_bed command: nextflow run tests/modules/freebayes -entry test_freebayes_bed -c tests/config/nextflow.config @@ -12,7 +11,6 @@ - freebayes files: - path: output/freebayes/test.vcf.gz - md5sum: fc6e228c8ac5508bd83da45eafc2e7b2 - name: freebayes test_freebayes_cram command: nextflow run tests/modules/freebayes -entry test_freebayes_cram -c tests/config/nextflow.config @@ -20,7 +18,6 @@ - freebayes files: - path: output/freebayes/test.vcf.gz - md5sum: 91b8caaa0e396e1ba4f264a83bb67254 - name: freebayes test_freebayes_somatic command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic -c tests/config/nextflow.config @@ -28,7 +25,6 @@ - freebayes files: - path: output/freebayes/test.vcf.gz - md5sum: 40da977199f16d9888e0e0e07e8bebee - name: freebayes test_freebayes_somatic_cram_intervals command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic_cram_intervals -c tests/config/nextflow.config @@ -36,4 +32,3 @@ - freebayes files: - path: output/freebayes/test.vcf.gz - md5sum: dd976880365287d9ad31a606eb4d091f From 08b71fa85f69147f7c56552bfca08045ac43a137 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 3 Nov 2021 17:01:23 +0100 Subject: [PATCH 201/314] New module: `gunc run` (+ `gunc downloaddb`) (#880) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Add GUNC download_db and run commands * Bump with version without zgrep * Apply suggestions from code review Co-authored-by: Robert A. Petit III * Harshil formatting * Apply suggestions from code review Co-authored-by: Robert A. Petit III Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry Co-authored-by: Robert A. 
Petit III --- modules/gunc/downloaddb/functions.nf | 78 ++++++++++++++++++++++++++ modules/gunc/downloaddb/main.nf | 37 ++++++++++++ modules/gunc/downloaddb/meta.yml | 36 ++++++++++++ modules/gunc/run/functions.nf | 78 ++++++++++++++++++++++++++ modules/gunc/run/main.nf | 45 +++++++++++++++ modules/gunc/run/meta.yml | 53 +++++++++++++++++ tests/config/pytest_modules.yml | 8 +++ tests/modules/gunc/downloaddb/main.nf | 12 ++++ tests/modules/gunc/downloaddb/test.yml | 8 +++ tests/modules/gunc/run/main.nf | 17 ++++++ tests/modules/gunc/run/test.yml | 8 +++ 11 files changed, 380 insertions(+) create mode 100644 modules/gunc/downloaddb/functions.nf create mode 100644 modules/gunc/downloaddb/main.nf create mode 100644 modules/gunc/downloaddb/meta.yml create mode 100644 modules/gunc/run/functions.nf create mode 100644 modules/gunc/run/main.nf create mode 100644 modules/gunc/run/meta.yml create mode 100644 tests/modules/gunc/downloaddb/main.nf create mode 100644 tests/modules/gunc/downloaddb/test.yml create mode 100644 tests/modules/gunc/run/main.nf create mode 100644 tests/modules/gunc/run/test.yml diff --git a/modules/gunc/downloaddb/functions.nf b/modules/gunc/downloaddb/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gunc/downloaddb/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gunc/downloaddb/main.nf b/modules/gunc/downloaddb/main.nf new file mode 100644 index 00000000..af421608 --- /dev/null +++ b/modules/gunc/downloaddb/main.nf @@ -0,0 +1,37 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GUNC_DOWNLOADDB { + tag '$db_name' + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "bioconda::gunc=1.0.5" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0" + } + + input: + val db_name + + output: + path "*.dmnd" , emit: db + path "versions.yml" , emit: versions + + script: + """ + gunc download_db . -db $db_name $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( gunc --version ) + END_VERSIONS + """ +} diff --git a/modules/gunc/downloaddb/meta.yml b/modules/gunc/downloaddb/meta.yml new file mode 100644 index 00000000..cb486da0 --- /dev/null +++ b/modules/gunc/downloaddb/meta.yml @@ -0,0 +1,36 @@ +name: gunc_downloaddb +description: Download database for GUNC detection of Chimerism and Contamination in Prokaryotic Genomes +keywords: + - download + - prokaryote + - assembly + - genome + - quality control + - chimeras +tools: + - gunc: + description: Python package for detection of chimerism and contamination in prokaryotic genomes. + homepage: https://grp-bork.embl-community.io/gunc/ + documentation: https://grp-bork.embl-community.io/gunc/ + tool_dev_url: https://github.com/grp-bork/gunc + doi: "10.1186/s13059-021-02393-0" + licence: ['GNU General Public v3 or later (GPL v3+)'] + +input: + - db_name: + type: string + description: "Which database to download. 
Options: progenomes or gtdb" + pattern: "progenomes|gtdb" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - db: + type: file + description: GUNC database file + pattern: "*.dmnd" + +authors: + - "@jfy133" diff --git a/modules/gunc/run/functions.nf b/modules/gunc/run/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gunc/run/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gunc/run/main.nf b/modules/gunc/run/main.nf new file mode 100644 index 00000000..f873a7df --- /dev/null +++ b/modules/gunc/run/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GUNC_RUN { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::gunc=1.0.5" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + path(db) + + output: + tuple val(meta), path("*maxCSS_level.tsv") , emit: maxcss_level_tsv + tuple val(meta), path("*all_levels.tsv") , optional: true, emit: all_levels_tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + gunc \\ + run \\ + --input_fasta $fasta \\ + --db_file $db \\ + --threads $task.cpus \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( gunc --version ) + END_VERSIONS + """ +} diff --git a/modules/gunc/run/meta.yml b/modules/gunc/run/meta.yml new file mode 100644 index 00000000..1dd4a8ae --- /dev/null +++ b/modules/gunc/run/meta.yml @@ -0,0 +1,53 @@ +name: gunc_run +description: Detection of Chimerism and Contamination in Prokaryotic Genomes +keywords: + - prokaryote + - assembly + - genome + - quality control + - chimeras +tools: + - gunc: + description: Python package for detection of chimerism and contamination in prokaryotic genomes. + homepage: https://grp-bork.embl-community.io/gunc/ + documentation: https://grp-bork.embl-community.io/gunc/ + tool_dev_url: https://github.com/grp-bork/gunc + doi: "10.1186/s13059-021-02393-0" + licence: ['GNU General Public v3 or later (GPL v3+)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA file containing contig (bins) + pattern: "*.fa" + - db: + type: file + description: GUNC database file + pattern: "*.dmnd" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - maxcss_levels_tsv: + type: file + description: Output file with scores for a taxonomic level with the highest CSS score + pattern: "*.tsv" + - all_levels_tsv: + type: file + description: Optional output file with results for each taxonomic level + pattern: "*.tsv" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 047f83ff..8d8f32f3 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -546,6 +546,14 @@ gubbins: - modules/gubbins/** - tests/modules/gubbins/** +gunc/downloaddb: + - modules/gunc/downloaddb/** + - tests/modules/gunc/downloaddb/** + +gunc/run: + - modules/gunc/run/** + - tests/modules/gunc/run/** + gunzip: - modules/gunzip/** - tests/modules/gunzip/** diff --git a/tests/modules/gunc/downloaddb/main.nf b/tests/modules/gunc/downloaddb/main.nf new file mode 100644 index 00000000..c0321279 --- /dev/null +++ b/tests/modules/gunc/downloaddb/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' addParams( options: [:] ) + +workflow test_gunc_downloaddb { + + input = 'progenomes' + + GUNC_DOWNLOADDB ( input ) +} diff --git a/tests/modules/gunc/downloaddb/test.yml b/tests/modules/gunc/downloaddb/test.yml new file mode 100644 index 00000000..d1aafae7 --- /dev/null +++ b/tests/modules/gunc/downloaddb/test.yml @@ -0,0 +1,8 @@ +- name: gunc downloaddb + command: nextflow run ./tests/modules/gunc/downloaddb -entry test_gunc_downloaddb -c tests/config/nextflow.config + tags: + - gunc + - gunc/downloaddb + files: + - path: output/gunc/gunc_db_progenomes2.1.dmnd + md5sum: 447c9330056b02f29f30fe81fe4af4eb diff --git a/tests/modules/gunc/run/main.nf b/tests/modules/gunc/run/main.nf new file mode 100644 index 00000000..a1a191dc --- /dev/null +++ b/tests/modules/gunc/run/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GUNC_RUN } from '../../../../modules/gunc/run/main.nf' addParams( options: [:] ) +include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' addParams( options: [:] ) + + +workflow test_gunc_run { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) ] + + GUNC_DOWNLOADDB('progenomes') + + GUNC_RUN ( input, GUNC_DOWNLOADDB.out.db ) +} diff --git a/tests/modules/gunc/run/test.yml b/tests/modules/gunc/run/test.yml new file mode 100644 index 00000000..d527f37e --- /dev/null +++ b/tests/modules/gunc/run/test.yml @@ -0,0 +1,8 @@ +- name: gunc run + command: nextflow run ./tests/modules/gunc/run -entry test_gunc_run -c tests/config/nextflow.config + tags: + - gunc + - gunc/run + files: + - path: output/gunc/GUNC.progenomes_2.1.maxCSS_level.tsv + md5sum: 0420c1a9f2c50fefaee9fab5d80a551a From fc4f3e8822865d85904c1e96e93868dae7247a81 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Thu, 4 Nov 2021 01:49:30 -0600 Subject: [PATCH 202/314] add seqsero2 module (#1016) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add seqsero2 module * correct lint errors * Update modules/seqsero2/main.nf Co-authored-by: Sébastien Guizard * set output directory Co-authored-by: Sébastien Guizard --- modules/seqsero2/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/seqsero2/main.nf | 45 +++++++++++++++++++ modules/seqsero2/meta.yml | 52 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 12 +++-- tests/modules/seqsero2/main.nf | 13 ++++++ tests/modules/seqsero2/test.yml | 11 +++++ 6 files changed, 207 insertions(+), 4 deletions(-) create mode 100644 modules/seqsero2/functions.nf create mode 100644 modules/seqsero2/main.nf create mode 100644 modules/seqsero2/meta.yml create mode 100644 tests/modules/seqsero2/main.nf create mode 100644 tests/modules/seqsero2/test.yml diff --git a/modules/seqsero2/functions.nf b/modules/seqsero2/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/seqsero2/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/seqsero2/main.nf b/modules/seqsero2/main.nf new file mode 100644 index 00000000..3748a6e4 --- /dev/null +++ b/modules/seqsero2/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SEQSERO2 { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::seqsero2=1.2.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/seqsero2:1.2.1--py_0" + } else { + container "quay.io/biocontainers/seqsero2:1.2.1--py_0" + } + + input: + tuple val(meta), path(seqs) + + output: + tuple val(meta), path("results/*_log.txt") , emit: log + tuple val(meta), path("results/*_result.tsv"), emit: tsv + tuple val(meta), path("results/*_result.txt"), emit: txt + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + SeqSero2_package.py \\ + $options.args \\ + -d results/ \\ + -n $prefix \\ + -p $task.cpus \\ + -i $seqs + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$( SeqSero2_package.py --version 2>&1) | sed 's/^.*SeqSero2_package.py //' ) + END_VERSIONS + """ +} diff --git a/modules/seqsero2/meta.yml b/modules/seqsero2/meta.yml new file mode 100644 index 00000000..ceea80e3 --- /dev/null +++ b/modules/seqsero2/meta.yml @@ -0,0 +1,52 @@ +name: seqsero2 +description: Salmonella serotype prediction from reads and assemblies +keywords: + - fasta + - fastq + - salmonella + - sertotype +tools: + - seqsero2: + description: Salmonella serotype prediction from genome sequencing data + homepage: https://github.com/denglab/SeqSero2 + documentation: https://github.com/denglab/SeqSero2 + tool_dev_url: https://github.com/denglab/SeqSero2 + doi: "10.1128/AEM.01746-19" + licence: ['GPL v2'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - seqs: + type: file + description: FASTQ or FASTA formated sequences + pattern: "*.{fq.gz,fastq.gz,fna.gz,fna,fasta.gz,fasta,fa.gz,fa}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - log: + type: file + description: A log of serotype antigen results + pattern: "*_log.txt" + - tsv: + type: file + description: Tab-delimited summary of the SeqSero2 results + pattern: "*_result.tsv" + - txt: + type: file + description: Detailed summary of the SeqSero2 results + pattern: "*_result.txt" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8d8f32f3..008c98dc 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -446,14 +446,14 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** -gatk4/genomicsdbimport: - - modules/gatk4/genomicsdbimport/** - - tests/modules/gatk4/genomicsdbimport/** - gatk4/filtermutectcalls: - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** +gatk4/genomicsdbimport: + - modules/gatk4/genomicsdbimport/** + - tests/modules/gatk4/genomicsdbimport/** + gatk4/getpileupsummaries: - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -1083,6 +1083,10 @@ seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** +seqsero2: + - modules/seqsero2/** + - tests/modules/seqsero2/** + seqtk/mergepe: - modules/seqtk/mergepe/** - tests/modules/seqtk/mergepe/** diff --git a/tests/modules/seqsero2/main.nf b/tests/modules/seqsero2/main.nf new file mode 100644 index 00000000..04ee8e27 --- /dev/null +++ b/tests/modules/seqsero2/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SEQSERO2 } from '../../../modules/seqsero2/main.nf' addParams( options: [args: '-m k -t 4'] ) + +workflow test_seqsero2 { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + SEQSERO2 ( input ) +} diff --git a/tests/modules/seqsero2/test.yml b/tests/modules/seqsero2/test.yml new file mode 100644 index 00000000..2aa49686 --- /dev/null +++ b/tests/modules/seqsero2/test.yml @@ -0,0 +1,11 @@ +- name: seqsero2 test_seqsero2 + command: nextflow run tests/modules/seqsero2 -entry test_seqsero2 -c tests/config/nextflow.config + tags: + - seqsero2 + files: + - path: output/seqsero2/results/SeqSero_log.txt + md5sum: d00242dfa734b5abb3622a6048f0b4fb + - path: output/seqsero2/results/SeqSero_result.tsv + contains: ['Sample', 'Predicted', 'Note'] + - path: output/seqsero2/results/SeqSero_result.txt + contains: ['Sample', 'Predicted', 'Note'] From 02892ef65427bc95827d9d422368b55841dfb974 Mon Sep 17 00:00:00 2001 From: Francesco L <53608000+lescai@users.noreply.github.com> Date: Thu, 4 Nov 2021 18:18:56 +0100 Subject: [PATCH 203/314] New module: Samblaster (#954) * add base code from samblaster * added test yml * fixing versions files, should this be the cause of online lint failures * removed tmp files that shouldn't be there * fixing output file name - 1 Co-authored-by: Harshil Patel * fixing output file name - 2 Co-authored-by: Harshil Patel * fixing output file name - 3 Co-authored-by: Harshil Patel * fixing output file name - 4 Co-authored-by: Harshil Patel * fixing output file name - 5 Co-authored-by: Harshil Patel * fixing output file name - 6 * fixed indent * fixed input name and updated test.yml file with new name Co-authored-by: Harshil Patel --- modules/samblaster/functions.nf | 78 +++++++++++++++++++++++++++++++ modules/samblaster/main.nf | 42 
+++++++++++++++++ modules/samblaster/meta.yml | 53 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/samblaster/main.nf | 13 ++++++ tests/modules/samblaster/test.yml | 7 +++ 6 files changed, 197 insertions(+) create mode 100644 modules/samblaster/functions.nf create mode 100644 modules/samblaster/main.nf create mode 100644 modules/samblaster/meta.yml create mode 100644 tests/modules/samblaster/main.nf create mode 100644 tests/modules/samblaster/test.yml diff --git a/modules/samblaster/functions.nf b/modules/samblaster/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/samblaster/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/samblaster/main.nf b/modules/samblaster/main.nf new file mode 100644 index 00000000..4481d8cd --- /dev/null +++ b/modules/samblaster/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SAMBLASTER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::samblaster=0.1.26 bioconda::samtools=1.14" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0" + } else { + container "quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + if( "$bam" == "${prefix}.bam" ) error "Input and output names are the same, use the suffix option to disambiguate" + """ + samtools view -h $options.args2 $bam | \\ + samblaster $options.args | \\ + samtools view $options.args3 -Sb - >${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( samblaster -h 2>&1 | head -n 1 | sed 's/^samblaster: Version //' ) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/samblaster/meta.yml b/modules/samblaster/meta.yml new file mode 100644 index 00000000..4d51f4fe --- /dev/null +++ b/modules/samblaster/meta.yml @@ -0,0 +1,53 @@ +name: samblaster +description: | + This module combines samtools and samblaster in order to use + samblaster capability to filter or tag SAM files, with the advantage + of maintaining both input and output in BAM format. + Samblaster input must contain a sequence header: for this reason it has been piped + with the "samtools view -h" command. + Additional desired arguments for samtools can be passed using: + options.args2 for the input bam file + options.args3 for the output bam file +keywords: + - sort +tools: + - samblaster: + description: | + samblaster is a fast and flexible program for marking duplicates in read-id grouped paired-end SAM files. + It can also optionally output discordant read pairs and/or split read mappings to separate SAM files, + and/or unmapped/clipped reads to a separate FASTQ file. + By default, samblaster reads SAM input from stdin and writes SAM to stdout. 
+ homepage: None + documentation: https://github.com/GregoryFaust/samblaster + tool_dev_url: https://github.com/GregoryFaust/samblaster + doi: "10.1093/bioinformatics/btu314" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Tagged or filtered BAM file + pattern: "*.bam" + +authors: + - "@lescai" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 008c98dc..41694c2e 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1019,6 +1019,10 @@ salmon/quant: - modules/salmon/quant/** - tests/modules/salmon/quant/** +samblaster: + - modules/samblaster/** + - tests/modules/samblaster/** + samtools/ampliconclip: - modules/samtools/ampliconclip/** - tests/modules/samtools/ampliconclip/** diff --git a/tests/modules/samblaster/main.nf b/tests/modules/samblaster/main.nf new file mode 100644 index 00000000..5983d130 --- /dev/null +++ b/tests/modules/samblaster/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMBLASTER } from '../../../modules/samblaster/main.nf' addParams( options: [args: "-M --addMateTags", suffix:'.processed'] ) + +workflow test_samblaster { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_bam'], checkIfExists: true) ] + + SAMBLASTER ( input ) +} diff --git a/tests/modules/samblaster/test.yml b/tests/modules/samblaster/test.yml new file mode 100644 index 00000000..d56d4330 --- /dev/null +++ b/tests/modules/samblaster/test.yml @@ -0,0 +1,7 @@ +- name: samblaster test_samblaster + command: nextflow run tests/modules/samblaster -entry test_samblaster -c tests/config/nextflow.config + tags: + - samblaster + files: + - path: output/samblaster/test.processed.bam + md5sum: 950f23d85f75be1cf872f45c0144bdf4 From e560fbbc3ed68e62bc810a50f552394056f81762 Mon Sep 17 00:00:00 2001 From: Lasse Folkersen Date: Fri, 5 Nov 2021 10:25:54 +0100 Subject: [PATCH 204/314] Imputeme (#882) * first commit with imputeme as a module. Extensive re-write of imputeme-code, resulting in release v1.0.7 that is runnable in the next-flow framework. 
Co-authored-by: EC2 Default User Co-authored-by: Harshil Patel Co-authored-by: Pontus Freyhult --- modules/gunzip/test.txt.gz | Bin 0 -> 47 bytes modules/imputeme/vcftoprs/functions.nf | 78 +++++++++++++++++++++++ modules/imputeme/vcftoprs/main.nf | 60 +++++++++++++++++ modules/imputeme/vcftoprs/meta.yml | 41 ++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 2 + tests/modules/imputeme/vcftoprs/main.nf | 15 +++++ tests/modules/imputeme/vcftoprs/test.yml | 8 +++ 8 files changed, 208 insertions(+) create mode 100644 modules/gunzip/test.txt.gz create mode 100644 modules/imputeme/vcftoprs/functions.nf create mode 100644 modules/imputeme/vcftoprs/main.nf create mode 100644 modules/imputeme/vcftoprs/meta.yml create mode 100644 tests/modules/imputeme/vcftoprs/main.nf create mode 100644 tests/modules/imputeme/vcftoprs/test.yml diff --git a/modules/gunzip/test.txt.gz b/modules/gunzip/test.txt.gz new file mode 100644 index 0000000000000000000000000000000000000000..381417cf643f1b5c547b57b251d71e6d5ce11e16 GIT binary patch literal 47 zcmb2|=HU3lo{`AFT#{N`qE}K;!r-m#=Xv^+o}cIW6JE^0nUR_|V;IhR&VMY%z`y_i DevlAG literal 0 HcmV?d00001 diff --git a/modules/imputeme/vcftoprs/functions.nf b/modules/imputeme/vcftoprs/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/imputeme/vcftoprs/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/imputeme/vcftoprs/main.nf b/modules/imputeme/vcftoprs/main.nf new file mode 100644 index 00000000..a3ce7e3c --- /dev/null +++ b/modules/imputeme/vcftoprs/main.nf @@ -0,0 +1,60 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + + +params.options = [:] +options = initOptions(params.options) + +process IMPUTEME_VCFTOPRS { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "YOUR-TOOL-HERE" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://containers.biocontainers.pro/s3/SingImgsRepo/imputeme/vv1.0.7_cv1/imputeme_vv1.0.7_cv1.img" + } else { + container "biocontainers/imputeme:vv1.0.7_cv1" + } + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.json"), emit: json + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + #!/usr/bin/env Rscript + + #Set configuration - either from options.args or from defaults + source("/imputeme/code/impute-me/functions.R") + if(file.exists('$options.args')){ + set_conf("set_from_file",'$options.args') + }else{ + set_conf("set_from_file", "/imputeme/code/impute-me/template/nextflow_default_configuration.R") + } + + #main run + return_message <- prepare_individual_genome('$vcf',overrule_vcf_checks=T) + uniqueID <- sub(' .+\$','',sub('^.+this run is ','',return_message)) + convert_vcfs_to_simple_format(uniqueID=uniqueID) + crawl_for_snps_to_analyze(uniqueIDs=uniqueID) + run_export_script(uniqueIDs=uniqueID) + file.copy(paste0("./",uniqueID,"/",uniqueID,"_data.json"),"output.json") + + #version export. Have to hardcode process name and software name because + #won't run inside an R-block + version_file_path="versions.yml" + f <- file(version_file_path,"w") + writeLines("IMPUTEME_VCFTOPRS:", f) + writeLines(paste0(" imputeme: ", sub("^v","",get_conf("version"))),f) + close(f) + + """ + +} diff --git a/modules/imputeme/vcftoprs/meta.yml b/modules/imputeme/vcftoprs/meta.yml new file mode 100644 index 00000000..8ba5dfe1 --- /dev/null +++ b/modules/imputeme/vcftoprs/meta.yml @@ -0,0 +1,41 @@ +name: imputeme_vcftoprs +description: inputs a VCF-file with whole genome DNA sequencing. Outputs a JSON with polygenic risk scores. +keywords: + - PRS, VCF +tools: + - imputeme: + description: + homepage: www.impute.me + documentation: https://hub.docker.com/repository/docker/lassefolkersen/impute-me + tool_dev_url: https://github.com/lassefolkersen/impute-me + doi: "https://doi.org/10.3389/fgene.2020.00578" + licence: LGPL3 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ vcf:'test', single_end:false ] + - vcf: + type: file + description: vcf file + pattern: "*.{vcf}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: json containing Z-scores for all calculated PRS + pattern: "*.{json}" + +authors: + - "@lassefolkersen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 41694c2e..a8fa40df 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -609,6 +609,10 @@ homer/makeucscfile: - modules/homer/makeucscfile/** - tests/modules/homer/makeucscfile/** +imputeme/vcftoprs: + - modules/imputeme/vcftoprs/** + - tests/modules/imputeme/vcftoprs/** + idr: - modules/idr/** - tests/modules/idr/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 12252542..2d30880f 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -119,6 +119,8 @@ params { gnomad_r2_1_1_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz.tbi" mills_and_1000g_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz" mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi" + syntheticvcf_short_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz" + syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi" index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" } diff --git a/tests/modules/imputeme/vcftoprs/main.nf b/tests/modules/imputeme/vcftoprs/main.nf new file mode 100644 index 00000000..ff59ca5e --- /dev/null +++ b/tests/modules/imputeme/vcftoprs/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { IMPUTEME_VCFTOPRS } from '../../../../modules/imputeme/vcftoprs/main.nf' addParams( options: [:] ) + +workflow test_imputeme_vcftoprs { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_vcf_gz'], checkIfExists: true) + ] + + IMPUTEME_VCFTOPRS ( input ) +} diff --git a/tests/modules/imputeme/vcftoprs/test.yml b/tests/modules/imputeme/vcftoprs/test.yml new file mode 100644 index 00000000..efb73769 --- /dev/null +++ b/tests/modules/imputeme/vcftoprs/test.yml @@ -0,0 +1,8 @@ +- name: imputeme vcftoprs test_imputeme_vcftoprs + command: nextflow run tests/modules/imputeme/vcftoprs -entry test_imputeme_vcftoprs -c tests/config/nextflow.config + tags: + - imputeme + - imputeme/vcftoprs + files: + - path: output/imputeme/output.json + contains: [ 'type_2_diabetes_32541925":{"GRS":[24.01]' ] From 02932973fa5a1ee4cc4897ad44914a6bb015edf1 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 6 Nov 2021 16:05:04 +0100 Subject: [PATCH 205/314] bwameth: Add touch command to fix problem with bwameth rejecting older files (#1037) See https://github.com/nf-core/methylseq/pull/217 for context where this fix was added into the DSL1 methylseq pipeline. 
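For a little extra context on why a plain `touch` is enough here (a hedged reading of the linked PR, not text from the patch itself): bwameth.py appears to check the modification times of the reference/index files before aligning, and Nextflow typically stages task inputs with their original timestamps preserved, so a perfectly valid .c2t index can still look older than expected and trigger the complaint about rebuilding the index. Refreshing the mtimes of the staged files right before the alignment call sidesteps that check without altering any file contents. A minimal sketch of the pattern as it could sit in a module script block; the index directory name, read filenames and thread count are illustrative placeholders, not part of the patch:

    # locate the staged bwameth index prefix (assumes the usual *.bwameth.c2t naming)
    INDEX=`find -L bwameth_index -name "*.bwameth.c2t" | sed 's/.bwameth.c2t//'`

    # refresh timestamps of everything staged into the task work directory;
    # -c never creates missing files, -- guards against unusual filenames
    touch -c -- *

    # align as usual; the index no longer looks older than the reference
    bwameth.py --reference $INDEX -t 4 reads_1.fastq.gz reads_2.fastq.gz > aligned.sam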
--- modules/bwameth/align/main.nf | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index 9b1d2b86..e15aba6d 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -32,6 +32,10 @@ process BWAMETH_ALIGN { """ INDEX=`find -L ${index} -name "*.bwameth.c2t" | sed 's/.bwameth.c2t//'` + # Modify the timestamps so that bwameth doesn't complain about building the index + # See https://github.com/nf-core/methylseq/pull/217 + touch -c -- * + bwameth.py \\ $options.args \\ $read_group \\ From 22aa168622d6fa8c8da5a845979ddd3e8c619005 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Sat, 6 Nov 2021 09:34:39 -0600 Subject: [PATCH 206/314] add scoary module (#1034) Co-authored-by: Gregor Sturm --- modules/scoary/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/scoary/main.nf | 45 +++++++++++++++++++ modules/scoary/meta.yml | 51 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/scoary/main.nf | 15 +++++++ tests/modules/scoary/test.yml | 9 ++++ 6 files changed, 202 insertions(+) create mode 100644 modules/scoary/functions.nf create mode 100644 modules/scoary/main.nf create mode 100644 modules/scoary/meta.yml create mode 100644 tests/modules/scoary/main.nf create mode 100644 tests/modules/scoary/test.yml diff --git a/modules/scoary/functions.nf b/modules/scoary/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/scoary/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/scoary/main.nf b/modules/scoary/main.nf new file mode 100644 index 00000000..5720b4e5 --- /dev/null +++ b/modules/scoary/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SCOARY { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::scoary=1.6.16" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/scoary:1.6.16--py_2" + } else { + container "quay.io/biocontainers/scoary:1.6.16--py_2" + } + + input: + tuple val(meta), path(genes), path(traits) + path(tree) + + output: + tuple val(meta), path("*.csv"), emit: csv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def newick_tree = tree ? "-n ${tree}" : "" + """ + scoary \\ + $options.args \\ + --no-time \\ + --threads $task.cpus \\ + --traits $traits \\ + --genes $genes + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( scoary --version 2>&1 ) + END_VERSIONS + """ +} diff --git a/modules/scoary/meta.yml b/modules/scoary/meta.yml new file mode 100644 index 00000000..e8e8515e --- /dev/null +++ b/modules/scoary/meta.yml @@ -0,0 +1,51 @@ +name: scoary +description: Use pangenome outputs for GWAS +keywords: + - gwas + - pangenome + - prokaryote +tools: + - scoary: + description: Microbial pan-GWAS using the output from Roary + homepage: https://github.com/AdmiralenOla/Scoary + documentation: https://github.com/AdmiralenOla/Scoary + tool_dev_url: https://github.com/AdmiralenOla/Scoary + doi: "10.1186/s13059-016-1108-8" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - genes: + type: file + description: A presence/absence matrix of genes in the pan-genome + pattern: "*.csv" + - traits: + type: file + description: A CSV file containing trait information per-sample + pattern: "*.csv" + - tree: + type: file + description: A Newick formtted tree for phylogenetic analyses + pattern: "*.{dnd,nwk,treefile}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - csv: + type: file + description: Gene associations in a CSV file per trait + pattern: "*.csv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a8fa40df..6d9d0d12 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1083,6 +1083,10 @@ samtools/view: - modules/samtools/view/** - tests/modules/samtools/view/** +scoary: + - modules/scoary/** + - tests/modules/scoary/** + seacr/callpeak: - modules/seacr/callpeak/** - tests/modules/seacr/callpeak/** diff --git a/tests/modules/scoary/main.nf b/tests/modules/scoary/main.nf new file mode 100644 index 00000000..ec3f6e9f --- /dev/null +++ b/tests/modules/scoary/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SCOARY } from '../../../modules/scoary/main.nf' addParams( options: [:] ) + +workflow test_scoary { + + input = [ [ id:'test', single_end:false ], // meta map + file("https://github.com/AdmiralenOla/Scoary/raw/master/scoary/exampledata/Gene_presence_absence.csv", checkIfExists: true), + file("https://github.com/AdmiralenOla/Scoary/raw/master/scoary/exampledata/Tetracycline_resistance.csv", checkIfExists: true) ] + + tree = [] + SCOARY ( input, tree) +} diff --git a/tests/modules/scoary/test.yml b/tests/modules/scoary/test.yml new file mode 100644 index 00000000..c5269293 --- /dev/null +++ b/tests/modules/scoary/test.yml @@ -0,0 +1,9 @@ +- name: scoary test_scoary + command: nextflow run tests/modules/scoary -entry test_scoary -c tests/config/nextflow.config + tags: + - scoary + files: + - path: output/scoary/Bogus_trait.results.csv + md5sum: 9550c692bbe6ff0ac844357bfabb809b + - path: output/scoary/Tetracycline_resistance.results.csv + md5sum: a87740818ab4de69a758fc75d7b879dd From 729d9ae450e166938435cd9da9d95bbe6ad9062c Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Sat, 6 Nov 2021 09:44:26 -0600 Subject: [PATCH 207/314] add meningotype module (#1022) Co-authored-by: Gregor Sturm --- modules/meningotype/functions.nf | 78 ++++++++++++++++++++++++++++++ modules/meningotype/main.nf | 41 ++++++++++++++++ modules/meningotype/meta.yml | 43 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/meningotype/main.nf | 13 +++++ tests/modules/meningotype/test.yml | 7 +++ 6 files changed, 186 insertions(+) create mode 100644 modules/meningotype/functions.nf create mode 100644 modules/meningotype/main.nf create mode 100644 modules/meningotype/meta.yml create mode 100644 tests/modules/meningotype/main.nf create mode 100644 tests/modules/meningotype/test.yml diff --git a/modules/meningotype/functions.nf b/modules/meningotype/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/meningotype/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/meningotype/main.nf b/modules/meningotype/main.nf new file mode 100644 index 00000000..4e779e8c --- /dev/null +++ b/modules/meningotype/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MENINGOTYPE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::meningotype=0.8.5" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/meningotype:0.8.5--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/meningotype:0.8.5--pyhdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + meningotype \\ + $options.args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(meningotype --version 2>&1) | sed 's/^.*meningotype v//' ) + END_VERSIONS + """ +} diff --git a/modules/meningotype/meta.yml b/modules/meningotype/meta.yml new file mode 100644 index 00000000..07c2ff5e --- /dev/null +++ b/modules/meningotype/meta.yml @@ -0,0 +1,43 @@ +name: meningotype +description: Serotyping of Neisseria meningitidis assemblies +keywords: + - fasta + - Neisseria meningitidis + - serotype +tools: + - meningotype: + description: In silico serotyping and finetyping (porA and fetA) of Neisseria meningitidis + homepage: https://github.com/MDU-PHL/meningotype + documentation: https://github.com/MDU-PHL/meningotype + tool_dev_url: https://github.com/MDU-PHL/meningotype + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6d9d0d12..37da142b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -766,6 +766,10 @@ megahit: - modules/megahit/** - tests/modules/megahit/** +meningotype: + - modules/meningotype/** + - tests/modules/meningotype/** + metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** diff --git a/tests/modules/meningotype/main.nf b/tests/modules/meningotype/main.nf new file mode 100644 index 00000000..d660ec72 --- /dev/null +++ b/tests/modules/meningotype/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MENINGOTYPE } from '../../../modules/meningotype/main.nf' addParams( options: [:] ) + +workflow test_meningotype { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + MENINGOTYPE ( input ) +} diff --git a/tests/modules/meningotype/test.yml b/tests/modules/meningotype/test.yml new file mode 100644 index 00000000..c61e78a6 --- /dev/null +++ b/tests/modules/meningotype/test.yml @@ -0,0 +1,7 @@ +- name: meningotype test_meningotype + command: nextflow run tests/modules/meningotype -entry test_meningotype -c tests/config/nextflow.config + tags: + - meningotype + files: + - path: output/meningotype/test.tsv + md5sum: 25651bccb3d1c64cefcb7946fda30a6c From 316aedaaa626819c1b6eff26bb4f76383b333453 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Sat, 6 Nov 2021 15:51:15 +0000 Subject: [PATCH 208/314] bug fixes: genomicsdbimport (#1035) * saving changes to checkout * saving to sort out other branch * removed yml tracking of files that cant be tracked due to directory name changing between runs * test data added, ready for pr * fix eol linting error * Update modules/gatk4/genomicsdbimport/main.nf Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> * merging with master * update push to show progress * tests now working untar able to pass data to genomicsdbimport * commit to checkout * tests updated, module reworked to simplify and emit updated gendb * Apply suggestions from code review Co-authored-by: Harshil Patel * update meta.yml Priority of input options changed, updated to reflect this * Update test.yml name prefix changed in main script, test.yml updated to reflect this * fix tests due to review changes * bug fixes, multicalling samples and gendb emissions now fixed * Update pytest_modules.yml * Update meta.yml Co-authored-by: GCJMackenzie Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- modules/gatk4/genomicsdbimport/main.nf | 10 +++++----- modules/gatk4/genomicsdbimport/meta.yml | 6 +++++- tests/modules/gatk4/genomicsdbimport/main.nf | 6 +++--- tests/modules/gatk4/genomicsdbimport/test.yml | 18 +++++++++--------- 4 files changed, 22 insertions(+), 18 deletions(-) diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index aa4fceb0..78c6b81f 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -25,18 +25,18 @@ process GATK4_GENOMICSDBIMPORT { val input_map output: - 
tuple val(meta), path("*_genomicsdb") , optional:true, emit: genomicsdb + tuple val(meta), path("${prefix}") , optional:true, emit: genomicsdb tuple val(meta), path("$updated_db") , optional:true, emit: updatedb tuple val(meta), path("*.interval_list"), optional:true, emit: intervallist path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" // settings for running default create gendb mode - def inputs_command = input_map ? "--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V')}" - def dir_command = "--genomicsdb-workspace-path ${prefix}" - def intervals_command = intervalfile ? " -L ${intervalfile} " : " -L ${intervalval} " + inputs_command = input_map ? "--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V ')}" + dir_command = "--genomicsdb-workspace-path ${prefix}" + intervals_command = intervalfile ? " -L ${intervalfile} " : " -L ${intervalval} " // settings changed for running get intervals list mode if run_intlist is true if (run_intlist) { diff --git a/modules/gatk4/genomicsdbimport/meta.yml b/modules/gatk4/genomicsdbimport/meta.yml index f7a32e7e..af626cb1 100644 --- a/modules/gatk4/genomicsdbimport/meta.yml +++ b/modules/gatk4/genomicsdbimport/meta.yml @@ -66,7 +66,11 @@ output: - genomicsdb: type: directory description: Directory containing the files that compose the genomicsdb workspace, this is only output for create mode, as update changes an existing db - pattern: "*_genomicsdb" + pattern: "*/$prefix" + - updatedb: + type: directory + description: Directory containing the files that compose the updated genomicsdb workspace, this is only output for update mode, and should be the same path as the input wspace. + pattern: "same/path/as/wspace" - intervallist: type: file description: File containing the intervals used to generate the genomicsdb, only created by get intervals mode. 
diff --git a/tests/modules/gatk4/genomicsdbimport/main.nf b/tests/modules/gatk4/genomicsdbimport/main.nf index ef67b04a..aff3973d 100644 --- a/tests/modules/gatk4/genomicsdbimport/main.nf +++ b/tests/modules/gatk4/genomicsdbimport/main.nf @@ -7,7 +7,7 @@ include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimp workflow test_gatk4_genomicsdbimport_create_genomicsdb { - input = [ [ id:'test_genomicsdb'], // meta map + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) , file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) , @@ -26,7 +26,7 @@ workflow test_gatk4_genomicsdbimport_get_intervalslist { UNTAR ( db ) - def input = Channel.of([ [ id:'test_genomicsdb'], // meta map + def input = Channel.of([ [ id:'test'], // meta map [] , [] , [] , @@ -45,7 +45,7 @@ workflow test_gatk4_genomicsdbimport_update_genomicsdb { UNTAR ( db ) - def input = Channel.of([ [ id:'test_genomicsdb'], // meta map + def input = Channel.of([ [ id:'test'], // meta map file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'] , checkIfExists: true) , file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz_tbi'] , checkIfExists: true) , [] , diff --git a/tests/modules/gatk4/genomicsdbimport/test.yml b/tests/modules/gatk4/genomicsdbimport/test.yml index 68f5ae7a..5fe2b49b 100644 --- a/tests/modules/gatk4/genomicsdbimport/test.yml +++ b/tests/modules/gatk4/genomicsdbimport/test.yml @@ -4,19 +4,19 @@ - gatk4/genomicsdbimport - gatk4 files: - - path: output/gatk4/test_genomicsdb/__tiledb_workspace.tdb + - path: output/gatk4/test/__tiledb_workspace.tdb md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/gatk4/test_genomicsdb/callset.json + - path: output/gatk4/test/callset.json md5sum: a7d07d1c86449bbb1091ff29368da07a - - path: output/gatk4/test_genomicsdb/chr22$1$40001/.__consolidation_lock + - path: output/gatk4/test/chr22$1$40001/.__consolidation_lock md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/gatk4/test_genomicsdb/chr22$1$40001/__array_schema.tdb - - path: output/gatk4/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json + - path: output/gatk4/test/chr22$1$40001/__array_schema.tdb + - path: output/gatk4/test/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json md5sum: 2502f79658bc000578ebcfddfc1194c0 - - path: output/gatk4/test_genomicsdb/vcfheader.vcf - contains: + - path: output/gatk4/test/vcfheader.vcf + contains: - "FORMAT= Date: Mon, 8 Nov 2021 12:43:15 -0700 Subject: [PATCH 209/314] add emmtyper module (#1028) Co-authored-by: Gregor Sturm --- modules/emmtyper/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/emmtyper/main.nf | 41 +++++++++++++++++ modules/emmtyper/meta.yml | 43 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/emmtyper/main.nf | 13 ++++++ tests/modules/emmtyper/test.yml | 7 +++ 6 files changed, 186 insertions(+) create mode 100644 modules/emmtyper/functions.nf create mode 100644 modules/emmtyper/main.nf create mode 100644 modules/emmtyper/meta.yml create mode 100644 tests/modules/emmtyper/main.nf create mode 100644 tests/modules/emmtyper/test.yml diff --git a/modules/emmtyper/functions.nf b/modules/emmtyper/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/emmtyper/functions.nf @@ -0,0 +1,78 @@ +// +// 
Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/emmtyper/main.nf b/modules/emmtyper/main.nf new file mode 100644 index 00000000..74624c1f --- /dev/null +++ b/modules/emmtyper/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process EMMTYPER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::emmtyper=0.2.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/emmtyper:0.2.0--py_0" + } else { + container "quay.io/biocontainers/emmtyper:0.2.0--py_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + emmtyper \\ + $options.args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(emmtyper --version 2>&1) | sed 's/^.*emmtyper v//' ) + END_VERSIONS + """ +} diff --git a/modules/emmtyper/meta.yml b/modules/emmtyper/meta.yml new file mode 100644 index 00000000..019a8e4c --- /dev/null +++ b/modules/emmtyper/meta.yml @@ -0,0 +1,43 @@ +name: emmtyper +description: EMM typing of Streptococcus pyogenes assemblies +keywords: + - fasta + - Streptococcus pyogenes + - typing +tools: + - emmtyper: + description: Streptococcus pyogenes in silico EMM typer + homepage: https://github.com/MDU-PHL/emmtyper + documentation: https://github.com/MDU-PHL/emmtyper + tool_dev_url: https://github.com/MDU-PHL/emmtyper + doi: "" + licence: ['GNU General Public v3 (GPL v3)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 37da142b..146eba06 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -362,6 +362,10 @@ dshbio/splitgff3: - modules/dshbio/splitgff3/** - tests/modules/dshbio/splitgff3/** +emmtyper: + - modules/emmtyper/** + - tests/modules/emmtyper/** + ensemblvep: - modules/ensemblvep/** - tests/modules/ensemblvep/** diff --git a/tests/modules/emmtyper/main.nf b/tests/modules/emmtyper/main.nf new file mode 100644 index 00000000..9f2181a8 --- /dev/null +++ b/tests/modules/emmtyper/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { EMMTYPER } from '../../../modules/emmtyper/main.nf' addParams( options: [:] ) + +workflow test_emmtyper { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + EMMTYPER ( input ) +} diff --git a/tests/modules/emmtyper/test.yml b/tests/modules/emmtyper/test.yml new file mode 100644 index 00000000..da59e0f1 --- /dev/null +++ b/tests/modules/emmtyper/test.yml @@ -0,0 +1,7 @@ +- name: emmtyper test_emmtyper + command: nextflow run tests/modules/emmtyper -entry test_emmtyper -c tests/config/nextflow.config + tags: + - emmtyper + files: + - path: output/emmtyper/test.tsv + md5sum: c727ba859adec9ca8ff0e091ecf79c62 From c10f9eb817b8aa6c2f1ef43c08a18e54137580fc Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Mon, 8 Nov 2021 12:52:07 -0700 Subject: [PATCH 210/314] add lissero module (#1026) * add lissero module * Update test.yml Co-authored-by: Gregor Sturm --- modules/lissero/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/lissero/main.nf | 41 +++++++++++++++++ modules/lissero/meta.yml | 44 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/lissero/main.nf | 13 ++++++ tests/modules/lissero/test.yml | 7 +++ 6 files changed, 187 insertions(+) create mode 100644 modules/lissero/functions.nf create mode 100644 modules/lissero/main.nf create mode 100644 modules/lissero/meta.yml create mode 100644 tests/modules/lissero/main.nf create mode 100644 tests/modules/lissero/test.yml diff --git a/modules/lissero/functions.nf b/modules/lissero/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/lissero/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/lissero/main.nf b/modules/lissero/main.nf new file mode 100644 index 00000000..ff863aaa --- /dev/null +++ b/modules/lissero/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process LISSERO { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::lissero=0.4.9" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/lissero:0.4.9--py_0" + } else { + container "quay.io/biocontainers/lissero:0.4.9--py_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + lissero \\ + $options.args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(lissero --version 2>&1) | sed 's/^.*LisSero //' ) + END_VERSIONS + """ +} diff --git a/modules/lissero/meta.yml b/modules/lissero/meta.yml new file mode 100644 index 00000000..d4fb38df --- /dev/null +++ b/modules/lissero/meta.yml @@ -0,0 +1,44 @@ +name: lissero +description: Serogrouping Listeria monocytogenes assemblies +keywords: + - fasta + - Listeria monocytogenes + - serogroup +tools: + - lissero: + description: In silico serotyping of Listeria monocytogenes + homepage: https://github.com/MDU-PHL/LisSero/blob/master/README.md + documentation: https://github.com/MDU-PHL/LisSero/blob/master/README.md + tool_dev_url: https://github.com/MDU-PHL/lissero + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" + diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 146eba06..6fd80c59 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -714,6 +714,10 @@ lima: - modules/lima/** - tests/modules/lima/** +lissero: + - modules/lissero/** + - tests/modules/lissero/** + lofreq/call: - modules/lofreq/call/** - tests/modules/lofreq/call/** diff --git a/tests/modules/lissero/main.nf b/tests/modules/lissero/main.nf new file mode 100644 index 00000000..e653bd76 --- /dev/null +++ b/tests/modules/lissero/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { LISSERO } from '../../../modules/lissero/main.nf' addParams( options: [:] ) + +workflow test_lissero { + + input = [ [ id:'test', single_end:false ], // meta map + file("https://github.com/MDU-PHL/LisSero/raw/master/tests/test_seq/NC_002973.fna", checkIfExists: true) ] + + LISSERO ( input ) +} diff --git a/tests/modules/lissero/test.yml b/tests/modules/lissero/test.yml new file mode 100644 index 00000000..19e79623 --- /dev/null +++ b/tests/modules/lissero/test.yml @@ -0,0 +1,7 @@ +- name: lissero test_lissero + command: nextflow run tests/modules/lissero -entry test_lissero -c tests/config/nextflow.config + tags: + - lissero + files: + - path: output/lissero/test.tsv + contains: ['ID', 'SEROTYPE', 'FULL'] From e0ada7d219e6e3a71b47a8579d1a822a730ed38d Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Mon, 8 Nov 2021 21:08:26 +0100 Subject: [PATCH 211/314] New module: `metabat2` (#875) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. 
Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * split tests * export env variable * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- .../jgisummarizebamcontigdepths/functions.nf | 78 +++++++++++++++++++ .../jgisummarizebamcontigdepths/main.nf | 44 +++++++++++ .../jgisummarizebamcontigdepths/meta.yml | 50 ++++++++++++ modules/metabat2/metabat2/functions.nf | 78 +++++++++++++++++++ modules/metabat2/metabat2/main.nf | 53 +++++++++++++ modules/metabat2/metabat2/meta.yml | 56 +++++++++++++ tests/config/pytest_modules.yml | 9 +++ .../jgisummarizebamcontigdepths/main.nf | 14 ++++ .../jgisummarizebamcontigdepths/test.yml | 8 ++ tests/modules/metabat2/metabat2/main.nf | 35 +++++++++ tests/modules/metabat2/metabat2/test.yml | 23 ++++++ 11 files changed, 448 insertions(+) create mode 100644 modules/metabat2/jgisummarizebamcontigdepths/functions.nf create mode 100644 modules/metabat2/jgisummarizebamcontigdepths/main.nf create mode 100644 modules/metabat2/jgisummarizebamcontigdepths/meta.yml create mode 100644 modules/metabat2/metabat2/functions.nf create mode 100644 modules/metabat2/metabat2/main.nf create mode 100644 modules/metabat2/metabat2/meta.yml create mode 100644 tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf create mode 100644 tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml create mode 100644 tests/modules/metabat2/metabat2/main.nf create mode 100644 tests/modules/metabat2/metabat2/test.yml diff --git a/modules/metabat2/jgisummarizebamcontigdepths/functions.nf b/modules/metabat2/jgisummarizebamcontigdepths/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/metabat2/jgisummarizebamcontigdepths/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + 
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/modules/metabat2/jgisummarizebamcontigdepths/main.nf new file mode 100644 index 00000000..1860ae16 --- /dev/null +++ b/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -0,0 +1,44 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::metabat2=2.15" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1" + } else { + container "quay.io/biocontainers/metabat2:2.15--h986a166_1" + } + + input: + tuple val(meta), path(bam), path(bai) + + output: + tuple val(meta), path("*.txt.gz"), emit: depth + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + export OMP_NUM_THREADS=$task.cpus + + jgi_summarize_bam_contig_depths \\ + --outputDepth ${prefix}.txt \\ + $options.args \\ + $bam + + bgzip --threads $task.cpus ${prefix}.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/metabat2/jgisummarizebamcontigdepths/meta.yml b/modules/metabat2/jgisummarizebamcontigdepths/meta.yml new file mode 100644 index 00000000..351a4701 --- /dev/null +++ b/modules/metabat2/jgisummarizebamcontigdepths/meta.yml @@ -0,0 +1,50 @@ +name: metabat2_jgisummarizebamcontigdepths +description: Depth computation per contig step of metabat2 +keywords: + - sort + - binning + - depth + - bam + - coverage + - de novo assembly +tools: + - metabat2: + description: Metagenome binning + homepage: https://bitbucket.org/berkeleylab/metabat/src/master/ + documentation: https://bitbucket.org/berkeleylab/metabat/src/master/ + tool_dev_url: https://bitbucket.org/berkeleylab/metabat/src/master/ + doi: "10.7717/peerj.7359" + licence: ['BSD-3-clause-LBNL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Sorted BAM file of reads aligned on the assembled contigs + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bam.bai" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - depth: + type: file + description: Text file listing the coverage per contig + pattern: ".txt.gz" + +authors: + - "@maxibor" diff --git a/modules/metabat2/metabat2/functions.nf b/modules/metabat2/metabat2/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/metabat2/metabat2/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do 
not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf new file mode 100644 index 00000000..589e268c --- /dev/null +++ b/modules/metabat2/metabat2/main.nf @@ -0,0 +1,53 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process METABAT2_METABAT2 { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::metabat2=2.15" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1" + } else { + container "quay.io/biocontainers/metabat2:2.15--h986a166_1" + } + + input: + tuple val(meta), path(fasta), path(depth) + + output: + tuple val(meta), path("bins/*.fa.gz") , optional:true , emit: fasta + tuple val(meta), path("*.tsv.gz"), optional:true , emit: membership + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def decompress_depth = depth ? "gzip -d -f $depth" : "" + def depth_file = depth ? 
"-a ${depth.baseName}" : "" + """ + $decompress_depth + + metabat2 \\ + $options.args \\ + -i $fasta \\ + $depth_file \\ + -t $task.cpus \\ + --saveCls \\ + -o metabat2/${prefix} + + mv metabat2/${prefix} ${prefix}.tsv + mv metabat2 bins + bgzip --threads $task.cpus ${prefix}.tsv + bgzip --threads $task.cpus bins/*.fa + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/metabat2/metabat2/meta.yml b/modules/metabat2/metabat2/meta.yml new file mode 100644 index 00000000..a7f3a7ff --- /dev/null +++ b/modules/metabat2/metabat2/meta.yml @@ -0,0 +1,56 @@ +name: metabat2_metabat2 +keywords: + - sort + - binning + - depth + - bam + - coverage + - de novo assembly +tools: + - metabat2: + description: Metagenome binning + homepage: https://bitbucket.org/berkeleylab/metabat/src/master/ + documentation: https://bitbucket.org/berkeleylab/metabat/src/master/ + tool_dev_url: https://bitbucket.org/berkeleylab/metabat/src/master/ + doi: "10.7717/peerj.7359" + licence: ['BSD-3-clause-LBNL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Fasta file of the assembled contigs + pattern: "*.{fa,fas,fasta,fna,fa.gz,fas.gz,fasta.gz,fna.gz}" + - depth: + type: file + description: | + Optional text file listing the coverage per contig pre-generated + by metabat2_jgisummarizebamcontigdepths + pattern: "*.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fasta: + type: file + description: Bins created from assembled contigs in fasta file + pattern: "*.fa.gz" + - membership: + type: file + description: cluster memberships as a matrix format. 
+ pattern: "*.tsv.gz" + + +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6fd80c59..4edf5ec6 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -774,10 +774,19 @@ megahit: - modules/megahit/** - tests/modules/megahit/** +metabat2/jgisummarizebamcontigdepths: + - modules/metabat2/jgisummarizebamcontigdepths/** + - tests/modules/metabat2/jgisummarizebamcontigdepths/** + +metabat2/metabat2: + - modules/metabat2/metabat2/** + - tests/modules/metabat2/metabat2/** + meningotype: - modules/meningotype/** - tests/modules/meningotype/** + metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf new file mode 100644 index 00000000..2cfc2e2c --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) + +workflow test_metabat2_jgisummarizebamcontigdepths { + + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input ) +} diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml new file mode 100644 index 00000000..d318c6d4 --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml @@ -0,0 +1,8 @@ +- name: metabat2 jgisummarizebamcontigdepths test_metabat2_jgisummarizebamcontigdepths + command: nextflow run tests/modules/metabat2/jgisummarizebamcontigdepths -entry test_metabat2_jgisummarizebamcontigdepths -c tests/config/nextflow.config + tags: + - metabat2/jgisummarizebamcontigdepths + - metabat2 + files: + - path: output/metabat2/test.txt.gz + md5sum: 8f735aa408d6c90e5a0310e06ace7a9a diff --git a/tests/modules/metabat2/metabat2/main.nf b/tests/modules/metabat2/metabat2/main.nf new file mode 100644 index 00000000..3d01f194 --- /dev/null +++ b/tests/modules/metabat2/metabat2/main.nf @@ -0,0 +1,35 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) + +workflow test_metabat2_no_depth { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + Channel.fromPath(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it, []] } + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) +} + +workflow test_metabat2_depth { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) +} diff --git a/tests/modules/metabat2/metabat2/test.yml b/tests/modules/metabat2/metabat2/test.yml new file mode 100644 index 00000000..7b3435b7 --- /dev/null +++ b/tests/modules/metabat2/metabat2/test.yml @@ -0,0 +1,23 @@ +- name: metabat2 metabat2 test_metabat2_no_depth + command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c tests/config/nextflow.config + tags: + - metabat2/metabat2 + - metabat2 + files: + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 0e9bce5b5a0033fd4411a21dec881170 + - path: output/metabat2/test.tsv.gz + md5sum: ea77e8c4426d2337419905b57f1ec335 + +- name: metabat2 metabat2 test_metabat2_depth + command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c tests/config/nextflow.config + tags: + - metabat2/metabat2 + - metabat2 + files: + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 0e9bce5b5a0033fd4411a21dec881170 + - path: output/metabat2/test.tsv.gz + md5sum: ea77e8c4426d2337419905b57f1ec335 + - path: output/metabat2/test.txt.gz + md5sum: 8f735aa408d6c90e5a0310e06ace7a9a From 9573cb1bec52de3d50de3f277c28366f3c5795fe Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Tue, 9 Nov 2021 10:16:43 +0000 Subject: [PATCH 212/314] Add panel of normals subworkflow (#1044) * commiting changes to switch branch * commit to setup remote branch * first draft of the sompon workflow * keep branch in line with gendb bugfixing * Update test.yml * tidy up main.nf * fixed md5sum Co-authored-by: GCJMackenzie --- .../nf-core/gatk_create_som_pon/main.nf | 58 ++++++++++++++ .../nf-core/gatk_create_som_pon/meta.yml | 75 +++++++++++++++++++ .../gatk_create_som_pon/nextflow.config | 3 + tests/config/pytest_subworkflows.yml | 5 ++ .../nf-core/gatk_create_som_pon/main.nf | 26 +++++++ .../nf-core/gatk_create_som_pon/test.yml | 38 ++++++++++ 6 files changed, 205 insertions(+) create mode 100644 subworkflows/nf-core/gatk_create_som_pon/main.nf create mode 100644 subworkflows/nf-core/gatk_create_som_pon/meta.yml create mode 100644 subworkflows/nf-core/gatk_create_som_pon/nextflow.config create mode 100644 tests/subworkflows/nf-core/gatk_create_som_pon/main.nf create mode 100644 tests/subworkflows/nf-core/gatk_create_som_pon/test.yml diff --git a/subworkflows/nf-core/gatk_create_som_pon/main.nf b/subworkflows/nf-core/gatk_create_som_pon/main.nf new file mode 100644 index 00000000..9b190584 --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -0,0 +1,58 @@ +// +// Run GATK mutect2, genomicsdbimport and createsomaticpanelofnormals +// + +params.mutect2_options = [args: '--max-mnp-distance 0'] +params.gendbimport_options = [:] +params.createsompon_options = [:] + +include { GATK4_MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_GENOMICSDBIMPORT } from '../../../modules/gatk4/genomicsdbimport/main' addParams( options: params.gendbimport_options ) +include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../modules/gatk4/createsomaticpanelofnormals/main' addParams( options: 
params.createsompon_options ) + +workflow GATK_CREATE_SOM_PON { + take: + ch_mutect2_in // channel: [ val(meta), [ input ], [ input_index ], [] ] + fasta // channel: /path/to/reference/fasta + fastaidx // channel: /path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + pon_name // channel: name for panel of normals + interval_file // channel: /path/to/interval/file + + main: + ch_versions = Channel.empty() + input = channel.from(ch_mutect2_in) + // + //Perform variant calling for each sample using mutect2 module in panel of normals mode. + // + GATK4_MUTECT2 ( input , false , true, false , [] , fasta , fastaidx , dict , [], [] , [] , [] ) + ch_versions = ch_versions.mix(GATK4_MUTECT2.out.versions.first()) + + // + //Convert all sample vcfs into a genomicsdb workspace using genomicsdbimport. + // + ch_vcf = GATK4_MUTECT2.out.vcf.collect{it[1]}.toList() + ch_index = GATK4_MUTECT2.out.tbi.collect{it[1]}.toList() + gendb_input = Channel.of([[ id:pon_name ]]).combine(ch_vcf).combine(ch_index).combine([interval_file]).combine(['']).combine([dict]) + GATK4_GENOMICSDBIMPORT ( gendb_input, false, false, false ) + ch_versions = ch_versions.mix(GATK4_GENOMICSDBIMPORT.out.versions.first()) + + // + //Panel of normals made from genomicsdb workspace using createsomaticpanelofnormals. + // + GATK4_GENOMICSDBIMPORT.out.genomicsdb.view() + GATK4_CREATESOMATICPANELOFNORMALS ( GATK4_GENOMICSDBIMPORT.out.genomicsdb, fasta, fastaidx, dict ) + ch_versions = ch_versions.mix(GATK4_CREATESOMATICPANELOFNORMALS.out.versions.first()) + + emit: + mutect2_vcf = GATK4_MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_index = GATK4_MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = GATK4_MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + genomicsdb = GATK4_GENOMICSDBIMPORT.out.genomicsdb // channel: [ val(meta), [ genomicsdb ] ] + + pon_vcf = GATK4_CREATESOMATICPANELOFNORMALS.out.vcf // channel: [ val(meta), [ vcf.gz ] ] + pon_index = GATK4_CREATESOMATICPANELOFNORMALS.out.tbi // channel: [ val(meta), [ tbi ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_create_som_pon/meta.yml b/subworkflows/nf-core/gatk_create_som_pon/meta.yml new file mode 100644 index 00000000..bc02b885 --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/meta.yml @@ -0,0 +1,75 @@ +name: gatk_create_som_pon +description: Perform variant calling on a set of normal samples using mutect2 panel of normals mode. Group them into a genomicsdbworkspace using genomicsdbimport, then use this to create a panel of normals using createsomaticpanelofnormals. +keywords: + - gatk4 + - mutect2 + - genomicsdbimport + - createsomaticpanelofnormals + - variant_calling + - genomicsdb_workspace + - panel_of_normals +modules: + - gatk4/mutect2 + - gatk4/genomicsdbimport + - gatk4/createsomaticpanelofnormals +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - input: + type: list + description: list of BAM files, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list of BAM file indexes, also able to take CRAM indexes as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fastaidx: + type: file + description: Index of reference fasta file + pattern: "fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - pon_name: + type: String + Description: name to be used for the genomicsdb workspace and panel of normals as meta_id has the individual sample names and a name for the combined files is reuired here. + pattern: "example_name" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: list + description: List of compressed vcf files to be used to make the gendb workspace + pattern: "[ *.vcf.gz ]" + - mutect2_index: + type: list + description: List of indexes of mutect2_vcf files + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: list + description: List of stats files that pair with mutect2_vcf files + pattern: "[ *vcf.gz.stats ]" + - genomicsdb: + type: directory + description: Directory containing the files that compose the genomicsdb workspace. + pattern: "path/name_of_workspace" + - pon_vcf: + type: file + description: Panel of normal as compressed vcf file + pattern: "*.vcf.gz" + - pon_index: + type: file + description: Index of pon_vcf file + pattern: "*vcf.gz.tbi" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_create_som_pon/nextflow.config b/subworkflows/nf-core/gatk_create_som_pon/nextflow.config new file mode 100644 index 00000000..6f560c9e --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/nextflow.config @@ -0,0 +1,3 @@ +params.mutect2_options = [:] +params.gendbimport_options = [:] +params.createsompon_options = [:] diff --git a/tests/config/pytest_subworkflows.yml b/tests/config/pytest_subworkflows.yml index 84919be8..4f9c5514 100644 --- a/tests/config/pytest_subworkflows.yml +++ b/tests/config/pytest_subworkflows.yml @@ -14,3 +14,8 @@ subworkflows/sra_fastq: - subworkflows/nf-core/sra_fastq/** - tests/subworkflows/nf-core/sra_fastq/** +subworkflows/gatk_create_som_pon: + - subworkflows/nf-core/gatk_create_som_pon/** + - tests/subworkflows/nf-core/gatk_create_som_pon/** + + \ No newline at end of file diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf new file mode 100644 index 00000000..d484ac2f --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -0,0 +1,26 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK_CREATE_SOM_PON } from '../../../../subworkflows/nf-core/gatk_create_som_pon/main' addParams( [:] ) + +workflow test_gatk_create_som_pon { + ch_mutect2_in = [ + [[ id:'test1' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] ], + [[ id:'test2' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], 
checkIfExists: true)], + [] ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + pon_name = "test_panel" + interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) + + GATK_CREATE_SOM_PON ( ch_mutect2_in, fasta, fastaidx, dict, pon_name, interval_file ) + +} diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml new file mode 100644 index 00000000..eae34dd4 --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -0,0 +1,38 @@ +- name: gatk_create_som_pon + command: nextflow run ./tests/subworkflows/nf-core/gatk_create_som_pon -entry test_gatk_create_som_pon -c tests/config/nextflow.config + tags: + - subworkflows/gatk_create_som_pon + # Modules + - gatk4 + - gatk4 + - gatk4/genomicsdbimport + - gatk4/createsomaticpanelofnormals + files: + # gatk4 mutect2 + - path: output/gatk4/test1.vcf.gz + - path: output/gatk4/test1.vcf.gz.stats + md5sum: 4f77301a125913170b8e9e7828b4ca3f + - path: output/gatk4/test1.vcf.gz.tbi + - path: output/gatk4/test2.vcf.gz + - path: output/gatk4/test2.vcf.gz.stats + md5sum: 106c5828b02b906c97922618b6072169 + - path: output/gatk4/test2.vcf.gz.tbi + # gatk4 genomicsdbimport + - path: output/gatk4/test_panel/__tiledb_workspace.tdb + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/gatk4/test_panel/callset.json + md5sum: 2ab411773b7267de61f8c04939de2a99 + - path: output/gatk4/test_panel/chr22$1$40001/.__consolidation_lock + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/gatk4/test_panel/chr22$1$40001/__array_schema.tdb + - path: output/gatk4/test_panel/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json + md5sum: 2502f79658bc000578ebcfddfc1194c0 + - path: output/gatk4/test_panel/vcfheader.vcf + contains: + - "FORMAT= Date: Tue, 9 Nov 2021 11:08:59 +0000 Subject: [PATCH 213/314] bugfix: panel of normals subworkflow: remove md5sum on a gzipped file (#1045) * commiting changes to switch branch * commit to setup remote branch * first draft of the sompon workflow * keep branch in line with gendb bugfixing * Update test.yml * tidy up main.nf * fixed md5sum * Update test.yml removed md5sum from test_panel.vcf.gz.tbi * Update test.yml * remove md5sum from gzipped test_panel.vcf.gz.tbi Co-authored-by: GCJMackenzie --- tests/subworkflows/nf-core/gatk_create_som_pon/test.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml index eae34dd4..7c9e7ac0 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -35,4 +35,3 @@ # gatk4 createsomaticpanelofnormals - path: output/gatk4/test_panel.vcf.gz - path: output/gatk4/test_panel.vcf.gz.tbi - md5sum: d7e2524ba4bf7538dbee3e225a74b0da From 6d3d8306e1ce8a096ef33f2ecfbc4d7b2ec91687 Mon Sep 17 00:00:00 2001 From: anan220606 <81744003+Darcy220606@users.noreply.github.com> Date: Tue, 9 Nov 2021 14:05:23 +0100 Subject: [PATCH 214/314] Add new module Mapdamage2 (#975) * Fitst attempt at mapdamage2 * Add new module mapdamage2 * Removed __pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc * Modify main.nf and meta.yml * Modify main.nf 
and meta.yml * Modify main.nf and meta.yml * Modify meta.yml * Update pytest_modules.yml * Apply suggestions from code review Co-authored-by: James A. Fellows Yates * edit the meta.yml and main.nf after reviews * Update meta.yml * Update meta.yml Co-authored-by: AIbrahim Co-authored-by: James A. Fellows Yates --- modules/mapdamage2/functions.nf | 78 ++++++++++++++++++++ modules/mapdamage2/main.nf | 58 +++++++++++++++ modules/mapdamage2/meta.yml | 114 ++++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/mapdamage2/main.nf | 15 ++++ tests/modules/mapdamage2/test.yml | 25 +++++++ 6 files changed, 294 insertions(+) create mode 100644 modules/mapdamage2/functions.nf create mode 100644 modules/mapdamage2/main.nf create mode 100644 modules/mapdamage2/meta.yml create mode 100644 tests/modules/mapdamage2/main.nf create mode 100644 tests/modules/mapdamage2/test.yml diff --git a/modules/mapdamage2/functions.nf b/modules/mapdamage2/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/mapdamage2/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/mapdamage2/main.nf b/modules/mapdamage2/main.nf new file mode 100644 index 00000000..e252e27c --- /dev/null +++ b/modules/mapdamage2/main.nf @@ -0,0 +1,58 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MAPDAMAGE2 { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::mapdamage2=2.2.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mapdamage2:2.2.1--pyr40_0" + } else { + container "quay.io/biocontainers/mapdamage2:2.2.1--pyr40_0" + } + + input: + tuple val(meta), path(bam) + path(fasta) + + output: + tuple val(meta), path("results_*/Runtime_log.txt") ,emit: runtime_log + tuple val(meta), path("results_*/Fragmisincorporation_plot.pdf"), optional: true ,emit: fragmisincorporation_plot + tuple val(meta), path("results_*/Length_plot.pdf"), optional: true ,emit: length_plot + tuple val(meta), path("results_*/misincorporation.txt"), optional: true ,emit: misincorporation + tuple val(meta), path("results_*/lgdistribution.txt"), optional: true ,emit: lgdistribution + tuple val(meta), path("results_*/dnacomp.txt"), optional: true ,emit: dnacomp + tuple val(meta), path("results_*/Stats_out_MCMC_hist.pdf"), optional: true ,emit: stats_out_mcmc_hist + tuple val(meta), path("results_*/Stats_out_MCMC_iter.csv"), optional: true ,emit: stats_out_mcmc_iter + tuple val(meta), path("results_*/Stats_out_MCMC_trace.pdf"), optional: true ,emit: stats_out_mcmc_trace + tuple val(meta), path("results_*/Stats_out_MCMC_iter_summ_stat.csv"), optional: true ,emit: stats_out_mcmc_iter_summ_stat + tuple val(meta), path("results_*/Stats_out_MCMC_post_pred.pdf"), optional: true ,emit: stats_out_mcmc_post_pred + tuple val(meta), path("results_*/Stats_out_MCMC_correct_prob.csv"), optional: true ,emit: stats_out_mcmc_correct_prob + tuple val(meta), path("results_*/dnacomp_genome.csv"), optional: true ,emit: dnacomp_genome + tuple val(meta), path("results_*/rescaled.bam"), optional: true ,emit: rescaled + tuple val(meta), path("results_*/5pCtoT_freq.txt"), optional: true ,emit: pctot_freq + tuple val(meta), path("results_*/3pGtoA_freq.txt"), optional: true ,emit: pgtoa_freq + tuple val(meta), path("results_*/*.fasta"), optional: true ,emit: fasta + tuple val(meta), path("*/"), optional: true ,emit: folder + path "versions.yml",emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + mapDamage \\ + $options.args \\ + -i $bam \\ + -r $fasta + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(mapDamage --version)) + END_VERSIONS + """ +} diff --git a/modules/mapdamage2/meta.yml b/modules/mapdamage2/meta.yml new file mode 100644 index 00000000..e511a0a6 --- /dev/null +++ b/modules/mapdamage2/meta.yml @@ -0,0 +1,114 @@ +name: mapdamage2 + +description: Computational framework for tracking and quantifying DNA damage patterns among ancient DNA sequencing reads generated by Next-Generation Sequencing platforms. +keywords: + - ancient DNA + - DNA damage + - NGS + - damage patterns + - bam +tools: + - mapdamage2: + description: Tracking and quantifying damage patterns in ancient DNA sequences + homepage: http://ginolhac.github.io/mapDamage/ + documentation: https://ginolhac.github.io/mapDamage/ + tool_dev_url: https://github.com/ginolhac/mapDamage + doi: "10.1093/bioinformatics/btt193" + licence: ['MIT'] + +input: + - meta: + type: map + description: Groovy Map containing sample information e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.{bam}" + - fasta: + type: file + description: Fasta file, the reference the input BAM was mapped against + pattern: "*.{fasta}" + +output: + - meta: + type: map + description: Groovy Map containing sample information e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - runtime_log: + type: file + description: Log file with a summary of command lines used and timestamps. + pattern: "Runtime_log.txt" + - fragmisincorporation_plot: + type: file + description: A pdf file that displays both fragmentation and misincorporation patterns. + pattern: "Fragmisincorporation_plot.pdf" + - length_plot: + type: file + description: A pdf file that displays length distribution of singleton reads per strand and cumulative frequencies of C->T at 5'-end and G->A at 3'-end are also displayed per strand. + pattern: "Length_plot.pdf" + - misincorporation: + type: file + description: Contains a table with occurrences for each type of mutations and relative positions from the reads ends. + pattern: "misincorporation.txt" + - pctot_freq: + type: file + description: Contains frequencies of Cytosine to Thymine mutations per position from the 5'-ends. + pattern: "5pCtoT_freq.txt" + - pgtoa_freq: + type: file + description: Contains frequencies of Guanine to Adenine mutations per position from the 3'-ends. + pattern: "3pGtoA_freq.txt" + - dnacomp: + type: file + description: Contains a table of the reference genome base composition per position, inside reads and adjacent regions. + pattern: "dnacomp.txt" + - lgdistribution: + type: file + description: Contains a table with read length distributions per strand. + pattern: "lgdistribution.txt" + - stats_out_mcmc_hist: + type: file + description: A MCMC histogram for the damage parameters and log likelihood. + pattern: "Stats_out_MCMC_hist.pdf" + - stats_out_mcmc_iter: + type: file + description: Values for the damage parameters and log likelihood in each MCMC iteration. + pattern: "Stats_out_MCMC_iter.csv" + - stats_out_mcmc_trace: + type: file + description: A MCMC trace plot for the damage parameters and log likelihood. 
+ pattern: "Stats_out_MCMC_trace.pdf" + - stats_out_mcmc_iter_summ_stat: + type: file + description: Summary statistics for the damage parameters' estimated posterior distributions. + pattern: "Stats_out_MCMC_iter_summ_stat.csv" + - stats_out_mcmc_post_pred: + type: file + description: Empirical misincorporation frequency and posterior predictive intervals from the fitted model. + pattern: "Stats_out_MCMC_post_pred.pdf" + - stats_out_mcmc_correct_prob: + type: file + description: Position-specific probability that a C->T or G->A misincorporation is due to damage. + pattern: "Stats_out_MCMC_correct_prob.csv" + - dnacomp_genome: + type: file + description: Contains the global reference genome base composition (computed by seqtk). + pattern: "dnacomp_genome.csv" + - rescaled: + type: file + description: Rescaled BAM file, where likely post-mortem damaged bases have downscaled quality scores. + pattern: "*.{bam}" + - fasta: + type: file + description: Alignments in a FASTA file, only if flagged by -d. + pattern: "*.{fasta}" + - folder: + type: folder + description: Folder created when --plot-only, --rescale and --stats-only flags are passed. + pattern: "*/" + +authors: +- "@darcy220606" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4edf5ec6..f1f5c096 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -758,6 +758,10 @@ manta/tumoronly: - modules/manta/tumoronly/** - tests/modules/manta/tumoronly/** +mapdamage2: + - modules/mapdamage2/** + - tests/modules/mapdamage2/** + mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** diff --git a/tests/modules/mapdamage2/main.nf b/tests/modules/mapdamage2/main.nf new file mode 100644 index 00000000..a4a0eb02 --- /dev/null +++ b/tests/modules/mapdamage2/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MAPDAMAGE2 } from '../../../modules/mapdamage2/main.nf' addParams( options: [:] ) + +workflow test_mapdamage2 { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + MAPDAMAGE2 ( input, fasta ) +} diff --git a/tests/modules/mapdamage2/test.yml b/tests/modules/mapdamage2/test.yml new file mode 100644 index 00000000..657f59b5 --- /dev/null +++ b/tests/modules/mapdamage2/test.yml @@ -0,0 +1,25 @@ +- name: mapdamage2 test_mapdamage2 + command: nextflow run tests/modules/mapdamage2 -entry test_mapdamage2 -c tests/config/nextflow.config + tags: + - mapdamage2 + files: + - path: output/mapdamage2/results_test.paired_end.sorted/3pGtoA_freq.txt + md5sum: 3b300b8d2842441675cb2b56740801f0 + - path: output/mapdamage2/results_test.paired_end.sorted/5pCtoT_freq.txt + md5sum: 4c27465cd02e1fb8bf6fb2b01e98446d + - path: output/mapdamage2/results_test.paired_end.sorted/Fragmisincorporation_plot.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/Runtime_log.txt + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_correct_prob.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_hist.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_iter.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_iter_summ_stat.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_post_pred.pdf + - path: 
output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_trace.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/dnacomp.txt + md5sum: 4244d9fa554bbfeebbcea8eba3ad6466 + - path: output/mapdamage2/results_test.paired_end.sorted/dnacomp_genome.csv + md5sum: ea91a3d205717d3c6b3e0b77bb840945 + - path: output/mapdamage2/results_test.paired_end.sorted/lgdistribution.txt + md5sum: f86dfc04b1fff4337cc91add6356e3a0 + - path: output/mapdamage2/results_test.paired_end.sorted/misincorporation.txt + md5sum: 1c89b4c96d1f8996c3d0879cad5129a5 From 6bb4a6a7eefdd2c53e15eca51949c05a503523c4 Mon Sep 17 00:00:00 2001 From: Benjamin Wingfield Date: Tue, 9 Nov 2021 14:03:13 +0000 Subject: [PATCH 215/314] Implement `plink/extract` module (#901) * Implement PLINK_EXTRACT module * fix plink version number * Update main.nf * Update test_data.config * Update modules/plink/extract/main.nf Co-authored-by: Harshil Patel * just use one channel * fix test with new channel input Co-authored-by: Harshil Patel --- modules/plink/extract/functions.nf | 78 ++++++++++++++++++++++++++++ modules/plink/extract/main.nf | 47 +++++++++++++++++ modules/plink/extract/meta.yml | 62 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 2 + tests/modules/plink/extract/main.nf | 29 +++++++++++ tests/modules/plink/extract/test.yml | 18 +++++++ 7 files changed, 240 insertions(+) create mode 100644 modules/plink/extract/functions.nf create mode 100644 modules/plink/extract/main.nf create mode 100644 modules/plink/extract/meta.yml create mode 100644 tests/modules/plink/extract/main.nf create mode 100644 tests/modules/plink/extract/test.yml diff --git a/modules/plink/extract/functions.nf b/modules/plink/extract/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/plink/extract/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta 
instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/plink/extract/main.nf b/modules/plink/extract/main.nf new file mode 100644 index 00000000..2e18500a --- /dev/null +++ b/modules/plink/extract/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PLINK_EXTRACT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::plink=1.90b6.21" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1" + } else { + container "quay.io/biocontainers/plink:1.90b6.21--h779adbc_1" + } + + input: + tuple val(meta), path(bed), path(bim), path(fam), path(variants) + + output: + tuple val(meta), path("*.bed"), emit: bed + tuple val(meta), path("*.bim"), emit: bim + tuple val(meta), path("*.fam"), emit: fam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + if( "$bed" == "${prefix}.bed" ) error "Input and output names are the same, use the suffix option to disambiguate" + """ + plink \\ + --bfile ${meta.id} \\ + $options.args \\ + --extract $variants \\ + --threads $task.cpus \\ + --make-bed \\ + --out $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(plink --version) | sed 's/^PLINK v//;s/64.*//') + END_VERSIONS + """ +} diff --git a/modules/plink/extract/meta.yml b/modules/plink/extract/meta.yml new file mode 100644 index 00000000..3978fbb4 --- /dev/null +++ b/modules/plink/extract/meta.yml @@ -0,0 +1,62 @@ +name: plink_extract +description: Subset plink bfiles with a text file of variant identifiers +keywords: + - extract + - plink +tools: + - plink: + description: Whole genome association analysis toolset, designed to perform a range of basic, large-scale analyses in a computationally efficient manner. + homepage: None + documentation: None + tool_dev_url: None + doi: "" + licence: ['GPL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bed: + type: file + description: PLINK binary biallelic genotype table + pattern: "*.{bed}" + - bim: + type: file + description: PLINK extended MAP file + pattern: "*.{bim}" + - fam: + type: file + description: PLINK sample information file + pattern: "*.{fam}" + - variants: + type: file + description: A text file containing variant identifiers to keep (one per line) + pattern: "*.{keep}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bed: + type: file + description: PLINK binary biallelic genotype table + pattern: "*.{bed}" + - bim: + type: file + description: PLINK extended MAP file + pattern: "*.{bim}" + - fam: + type: file + description: PLINK sample information file + pattern: "*.{fam}" + +authors: + - "@nebfield" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index f1f5c096..39a1393d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -936,6 +936,10 @@ plasmidid: - modules/plasmidid/** - tests/modules/plasmidid/** +plink/extract: + - modules/plink/extract/** + - tests/modules/plink/extract/** + plink/vcf: - modules/plink/vcf/** - tests/modules/plink/vcf/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 2d30880f..4ea333cb 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -119,8 +119,10 @@ params { gnomad_r2_1_1_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz.tbi" mills_and_1000g_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz" mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi" + syntheticvcf_short_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz" syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi" + index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" } diff --git a/tests/modules/plink/extract/main.nf b/tests/modules/plink/extract/main.nf new file mode 100644 index 00000000..e031a7b7 --- /dev/null +++ b/tests/modules/plink/extract/main.nf @@ -0,0 +1,29 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' addParams ( options: [args:'--make-bed --set-missing-var-ids @:#:\\$1:\\$2']) +include { PLINK_EXTRACT } from '../../../../modules/plink/extract/main.nf' addParams( options: [suffix:'.extract'] ) + +workflow test_plink_extract { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_vcf_gz'], checkIfExists: true) + ] + + PLINK_VCF ( input ) + + PLINK_VCF.out.bim + .splitText(file: 'variants.keep', keepHeader: false, by: 10) + .first() + .set { ch_variants } + + PLINK_VCF.out.bed + .concat(PLINK_VCF.out.bim, PLINK_VCF.out.fam.concat(ch_variants)) + .groupTuple() + .map{ meta, paths -> [meta, paths[0], paths[1], paths[2], paths[3]] } + .set { ch_extract } + + PLINK_EXTRACT ( ch_extract ) +} diff --git a/tests/modules/plink/extract/test.yml b/tests/modules/plink/extract/test.yml new file mode 100644 index 
00000000..40569d9d --- /dev/null +++ b/tests/modules/plink/extract/test.yml @@ -0,0 +1,18 @@ +- name: plink extract test_plink_extract + command: nextflow run tests/modules/plink/extract -entry test_plink_extract -c tests/config/nextflow.config + tags: + - plink + - plink/extract + files: + - path: output/plink/test.bed + md5sum: 9121010aba9905eee965e96bc983611d + - path: output/plink/test.bim + md5sum: 510ec606219ee5daaf5c207cb01554bf + - path: output/plink/test.extract.bed + md5sum: 9e02f7143bcc756a51f20d50ca7f8032 + - path: output/plink/test.extract.bim + md5sum: 63d190aea4094aa5d042aacd63397f94 + - path: output/plink/test.extract.fam + md5sum: c499456df4da78792ef29934ef3cd47d + - path: output/plink/test.fam + md5sum: c499456df4da78792ef29934ef3cd47d From b399f22af241b6d7d4a2f7aa5616bf21b9cff2f9 Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Tue, 9 Nov 2021 16:12:51 +0100 Subject: [PATCH 216/314] Add new module: cmseq/polymut (#918) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * add optional inputs/outpus * remove trailing whitespace * first cmseq commit * compressing and removing not reproducible md5sums * save intermediate work * follow symlinks while decompressing * add cmseq/polymut * add polymut * add extra test with optional input file * remove metabat2 * Update modules/cmseq/polymut/main.nf Co-authored-by: James A. Fellows Yates * Update modules/cmseq/polymut/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/cmseq/polymut/meta.yml Co-authored-by: James A. Fellows Yates * fix file extension * Update modules/cmseq/polymut/meta.yml Co-authored-by: James A. Fellows Yates * add test without bam index * split tests in workflows * answer PR review * report version from variable Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/cmseq/polymut/functions.nf | 78 ++++++++++++++++++++++++++++ modules/cmseq/polymut/main.nf | 46 ++++++++++++++++ modules/cmseq/polymut/meta.yml | 61 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/cmseq/polymut/main.nf | 38 ++++++++++++++ tests/modules/cmseq/polymut/test.yml | 26 ++++++++++ 6 files changed, 253 insertions(+) create mode 100644 modules/cmseq/polymut/functions.nf create mode 100644 modules/cmseq/polymut/main.nf create mode 100644 modules/cmseq/polymut/meta.yml create mode 100644 tests/modules/cmseq/polymut/main.nf create mode 100644 tests/modules/cmseq/polymut/test.yml diff --git a/modules/cmseq/polymut/functions.nf b/modules/cmseq/polymut/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cmseq/polymut/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cmseq/polymut/main.nf b/modules/cmseq/polymut/main.nf new file mode 100644 index 00000000..4c061e26 --- /dev/null +++ b/modules/cmseq/polymut/main.nf @@ -0,0 +1,46 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '1.0.4' + +process CMSEQ_POLYMUT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::cmseq=1.0.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/cmseq:1.0.4--pyhb7b1952_0" + } else { + container "quay.io/biocontainers/cmseq:1.0.4--pyhb7b1952_0" + } + + input: + tuple val(meta), path(bam), path(bai), path(gff), path(fasta) + + output: + tuple val(meta), path("*.txt"), emit: polymut + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def fasta_refid = fasta ? "-c $fasta" : "" + def sortindex = bai ? "" : "--sortindex" + """ + polymut.py \\ + $options.args \\ + $sortindex \\ + $fasta_refid \\ + --gff_file $gff \\ + $bam > ${prefix}.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ +} diff --git a/modules/cmseq/polymut/meta.yml b/modules/cmseq/polymut/meta.yml new file mode 100644 index 00000000..49e6b519 --- /dev/null +++ b/modules/cmseq/polymut/meta.yml @@ -0,0 +1,61 @@ +name: cmseq_polymut +description: Calculates polymorphic site rates over protein coding genes +keywords: + - polymut + - polymorphic + - mags + - assembly + - polymorphic sites + - estimation + - protein coding genes + - cmseq + - bam + - coverage +tools: + - cmseq: + description: Set of utilities on sequences and BAM files + homepage: https://github.com/SegataLab/cmseq + documentation: https://github.com/SegataLab/cmseq + tool_dev_url: https://github.com/SegataLab/cmseq + licence: ['MIT License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bai" + - gff: + type: file + description: GFF file used to extract protein-coding genes + pattern: "*.gff" + - fasta: + type: file + description: Optional fasta file to run on a subset of references in the BAM file. + pattern: .{fa,fasta,fas,fna} + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - polymut: + type: file + description: Polymut report in `.txt` format. 
+ pattern: "*.txt" + +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 39a1393d..2e5b55f1 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -274,6 +274,10 @@ chromap/index: - modules/chromap/index/** - tests/modules/chromap/index/** +cmseq/polymut: + - modules/cmseq/polymut/** + - tests/modules/cmseq/polymut/** + cnvkit: - modules/cnvkit/** - tests/modules/cnvkit/** diff --git a/tests/modules/cmseq/polymut/main.nf b/tests/modules/cmseq/polymut/main.nf new file mode 100644 index 00000000..729ed38f --- /dev/null +++ b/tests/modules/cmseq/polymut/main.nf @@ -0,0 +1,38 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CMSEQ_POLYMUT } from '../../../../modules/cmseq/polymut/main.nf' addParams( options: [:] ) + +workflow test_cmseq_polymut_1 { + + input_1 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + [], + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + [] ] + + CMSEQ_POLYMUT( input_1 ) + +} + +workflow test_cmseq_polymut_2 { + input_2 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + [] ] + + CMSEQ_POLYMUT( input_2 ) +} + +workflow test_cmseq_polymut_3 { + input_3 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), ] + + CMSEQ_POLYMUT( input_3 ) +} + diff --git a/tests/modules/cmseq/polymut/test.yml b/tests/modules/cmseq/polymut/test.yml new file mode 100644 index 00000000..2a989cb9 --- /dev/null +++ b/tests/modules/cmseq/polymut/test.yml @@ -0,0 +1,26 @@ +- name: cmseq polymut test_cmseq_polymut_1 + command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_1 -c tests/config/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc + +- name: cmseq polymut test_cmseq_polymut_2 + command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_2 -c tests/config/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc + +- name: cmseq polymut test_cmseq_polymut_3 + command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_3 -c tests/config/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc From 1abe23e1592f7cf6a3724589061827df0d8a0252 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 10 Nov 2021 10:30:48 +0100 Subject: [PATCH 217/314] Add CRAM to samtools/index (#1049) * feat: update samtools/index * feat: add test * fix: lint * fix: output file + md5 --- modules/samtools/index/main.nf | 11 ++++++----- modules/samtools/index/meta.yml | 5 +++++ tests/modules/samtools/index/main.nf | 13 +++++++++++-- tests/modules/samtools/index/test.yml | 9 +++++++++ 4 files changed, 31 insertions(+), 7 deletions(-) diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index febbc11c..62254bc8 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -19,16 +19,17 @@ process SAMTOOLS_INDEX { } input: - tuple val(meta), path(bam) + tuple val(meta), path(input) output: - tuple val(meta), path("*.bai"), optional:true, emit: bai - tuple val(meta), path("*.csi"), optional:true, emit: csi - path "versions.yml" , emit: versions + tuple val(meta), path("*.bai") , optional:true, emit: bai + tuple val(meta), path("*.crai"), optional:true, emit: crai + tuple val(meta), path("*.csi") , optional:true, emit: csi + path "versions.yml" , emit: versions script: """ - samtools index $options.args $bam + samtools index $options.args $input cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/index/meta.yml b/modules/samtools/index/meta.yml index 988e8f53..0905b3cd 100644 --- a/modules/samtools/index/meta.yml +++ b/modules/samtools/index/meta.yml @@ -35,6 +35,10 @@ output: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" + - crai: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" - csi: type: file description: CSI index file @@ -46,3 +50,4 @@ output: authors: - "@drpatelh" - "@ewels" + - "@maxulysse" diff --git a/tests/modules/samtools/index/main.nf b/tests/modules/samtools/index/main.nf index be9014e0..737936fb 100644 --- a/tests/modules/samtools/index/main.nf +++ b/tests/modules/samtools/index/main.nf @@ -2,8 +2,9 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' addParams( options: [args:'-c'] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CRAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' addParams( options: [args:'-c'] ) workflow test_samtools_index_bai { input = [ [ id:'test', single_end:false ], // meta map @@ -13,6 +14,14 @@ workflow test_samtools_index_bai { SAMTOOLS_INDEX_BAI ( input ) } +workflow test_samtools_index_crai { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true) + ] + + SAMTOOLS_INDEX_CRAI ( input ) +} + workflow test_samtools_index_csi { input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 31941dd6..66ab8211 100644 --- 
a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -7,6 +7,15 @@ - path: output/samtools/test.paired_end.sorted.bam.bai md5sum: 704c10dd1326482448ca3073fdebc2f4 +- name: samtools index crai + command: nextflow run tests/modules/samtools/index -entry test_samtools_index_crai -c tests/config/nextflow.config + tags: + - samtools + - samtools/index + files: + - path: output/samtools/test.paired_end.recalibrated.sorted.cram.crai + md5sum: 537e3d8c937bcc4e34e1cf47cd71d484 + - name: samtools index csi command: nextflow run tests/modules/samtools/index -entry test_samtools_index_csi -c tests/config/nextflow.config tags: From 7fdeed5b79517357758900d3d52e2ffe28c47102 Mon Sep 17 00:00:00 2001 From: Mei Wu Date: Wed, 10 Nov 2021 10:52:54 +0100 Subject: [PATCH 218/314] Picard/collecthsmetrics (#927) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * added template * integrated module * added fasta index info * test works, have placeholder data for baits until test-data PR is merged * added new files to config * updated test files * fixing fails :sparkles: * okay final fix here on the md5sum :face_palm: * md5sum variable * update meta.yml to reflect consistency to main.nf * reverted version so conda works * Apply suggestions from code review Co-authored-by: Sébastien Guizard * md5sum can't be generated consistently for output Co-authored-by: Sébastien Guizard --- modules/picard/collecthsmetrics/functions.nf | 78 +++++++++++++++++++ modules/picard/collecthsmetrics/main.nf | 58 ++++++++++++++ modules/picard/collecthsmetrics/meta.yml | 66 ++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/config/test_data.config | 3 + tests/modules/picard/collecthsmetrics/main.nf | 18 +++++ .../modules/picard/collecthsmetrics/test.yml | 8 ++ 7 files changed, 235 insertions(+) create mode 100644 modules/picard/collecthsmetrics/functions.nf create mode 100644 modules/picard/collecthsmetrics/main.nf create mode 100644 modules/picard/collecthsmetrics/meta.yml create mode 100644 tests/modules/picard/collecthsmetrics/main.nf create mode 100644 tests/modules/picard/collecthsmetrics/test.yml diff --git a/modules/picard/collecthsmetrics/functions.nf b/modules/picard/collecthsmetrics/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/picard/collecthsmetrics/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf new file mode 100644 index 00000000..1f7ad8e6 --- /dev/null +++ b/modules/picard/collecthsmetrics/main.nf @@ -0,0 +1,58 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PICARD_COLLECTHSMETRICS { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::picard=2.26.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/picard:2.26.2--hdfd78af_0" + } else { + container "quay.io/biocontainers/picard:2.26.2--hdfd78af_0" + } + + input: + tuple val(meta), path(bam) + path fasta + path fai + path bait_intervals + path target_intervals + + output: + tuple val(meta), path("*collecthsmetrics.txt"), emit: hs_metrics + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def reference = fasta ? "-R $fasta" : "" + + def avail_mem = 3 + if (!task.memory) { + log.info '[Picard CollectHsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } + """ + picard \\ + -Xmx${avail_mem}g \\ + CollectHsMetrics \\ + $options.args \\ + $reference \\ + -BAIT_INTERVALS $bait_intervals \\ + -TARGET_INTERVALS $target_intervals \\ + -INPUT $bam \\ + -OUTPUT ${prefix}_collecthsmetrics.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS + """ +} diff --git a/modules/picard/collecthsmetrics/meta.yml b/modules/picard/collecthsmetrics/meta.yml new file mode 100644 index 00000000..4b94909f --- /dev/null +++ b/modules/picard/collecthsmetrics/meta.yml @@ -0,0 +1,66 @@ +name: picard_collecthsmetrics +description: Collects hybrid-selection (HS) metrics for a SAM or BAM file. +keywords: + - alignment + - metrics + - statistics + - insert + - hybrid-selection + - quality + - bam +tools: + - picard: + description: | + A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) + data and formats such as SAM/BAM/CRAM and VCF. + homepage: https://broadinstitute.github.io/picard/ + documentation: https://broadinstitute.github.io/picard/ + tool_dev_url: https://github.com/broadinstitute/picard/ + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: An aligned BAM/SAM file + pattern: "*.{bam,sam}" + - fasta: + type: file + description: | + A reference file to calculate dropout metrics measuring reduced representation of reads. + Optional input. + pattern: "*.fasta" + - fai: + type: file + description: Index of FASTA file. Only needed when fasta is supplied. + pattern: "*.fai" + - bait_intervals: + type: file + description: An interval list file that contains the locations of the baits used. + pattern: "baits.interval_list" + - target_intervals: + type: file + description: An interval list file that contains the locations of the targets. + pattern: "targets.interval_list" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - hs_metrics: + type: file + description: The metrics file. 
+ pattern: "*_collecthsmetrics.txt" + +authors: + - "@projectoriented" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 2e5b55f1..8dfe67cf 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -908,6 +908,10 @@ pbccs: - modules/pbccs/** - tests/modules/pbccs/** +picard/collecthsmetrics: + - modules/picard/collecthsmetrics/** + - tests/modules/picard/collecthsmetrics/** + picard/collectmultiplemetrics: - modules/picard/collectmultiplemetrics/** - tests/modules/picard/collectmultiplemetrics/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 4ea333cb..6cb494f7 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -34,6 +34,9 @@ params { contigs_genome_maf_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.maf.gz" contigs_genome_par = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.par" lastdb_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/lastdb.tar.gz" + + baits_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/baits.interval_list" + targets_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/targets.interval_list" } 'illumina' { test_single_end_bam = "${test_data_dir}/genomics/sarscov2/illumina/bam/test.single_end.bam" diff --git a/tests/modules/picard/collecthsmetrics/main.nf b/tests/modules/picard/collecthsmetrics/main.nf new file mode 100644 index 00000000..24b031fc --- /dev/null +++ b/tests/modules/picard/collecthsmetrics/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PICARD_COLLECTHSMETRICS } from '../../../../modules/picard/collecthsmetrics/main.nf' addParams( options: [:] ) + +workflow test_picard_collecthsmetrics { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + bait_intervals = file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true) + target_intervals = file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) + + PICARD_COLLECTHSMETRICS ( input, fasta, fai, bait_intervals, target_intervals ) +} diff --git a/tests/modules/picard/collecthsmetrics/test.yml b/tests/modules/picard/collecthsmetrics/test.yml new file mode 100644 index 00000000..8c610abd --- /dev/null +++ b/tests/modules/picard/collecthsmetrics/test.yml @@ -0,0 +1,8 @@ +- name: picard collecthsmetrics test_picard_collecthsmetrics + command: nextflow run tests/modules/picard/collecthsmetrics -entry test_picard_collecthsmetrics -c tests/config/nextflow.config + tags: + - picard + - picard/collecthsmetrics + files: + # The file can't be md5'd consistently + - path: output/picard/test_collecthsmetrics.txt From 8b4bfb12bb95930feafaf7b019c9cf82e2a1f0b2 Mon Sep 17 00:00:00 2001 From: Daniel Lundin Date: Wed, 10 Nov 2021 11:27:52 +0100 Subject: [PATCH 219/314] Add log to output from bbmap/align (#1050) --- modules/bbmap/align/main.nf | 4 +++- tests/modules/bbmap/align/test.yml | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index 733fd4d5..40810575 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ 
-24,6 +24,7 @@ process BBMAP_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path("*.log"), emit: log path "versions.yml" , emit: versions script: @@ -51,7 +52,8 @@ process BBMAP_ALIGN { out=${prefix}.bam \\ $options.args \\ threads=$task.cpus \\ - -Xmx${task.memory.toGiga()}g + -Xmx${task.memory.toGiga()}g \\ + &> ${prefix}.bbmap.log cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/tests/modules/bbmap/align/test.yml b/tests/modules/bbmap/align/test.yml index 0fcc8ce9..a30713c9 100644 --- a/tests/modules/bbmap/align/test.yml +++ b/tests/modules/bbmap/align/test.yml @@ -6,6 +6,7 @@ files: - path: output/bbmap/test.bam md5sum: e0ec7f1eec537acf146fac1cbdd868d1 + - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c tests/config/nextflow.config @@ -15,6 +16,7 @@ files: - path: output/bbmap/test.bam md5sum: 345a72a0d58366d75dd263b107caa460 + - path: output/bbmap/test.bbmap.log - name: bbmap align single end index ref command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c tests/config/nextflow.config @@ -24,6 +26,7 @@ files: - path: output/bbmap/test.bam md5sum: 95f690636581ce9b27cf8568c715ae4d + - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref pigz command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c tests/config/nextflow.config @@ -33,3 +36,4 @@ files: - path: output/bbmap/test.bam md5sum: 441c4f196b9a82c7b224903538064308 + - path: output/bbmap/test.bbmap.log From 24707f2144aff3e966827376f37fb990fe0aa92e Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Wed, 10 Nov 2021 13:21:12 +0100 Subject: [PATCH 220/314] Fix read indexing in AdapterRemoval module (#1051) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. 
Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * split tests * export env variable * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip * fix read index * update test.yml Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- modules/adapterremoval/main.nf | 4 ++-- tests/modules/adapterremoval/test.yml | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index fad3963f..6d559826 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -49,7 +49,7 @@ process ADAPTERREMOVAL { """ AdapterRemoval \\ --file1 ${reads[0]} \\ - --file2 ${reads[0]} \\ + --file2 ${reads[1]} \\ $options.args \\ --basename $prefix \\ --threads $task.cpus \\ @@ -68,7 +68,7 @@ process ADAPTERREMOVAL { """ AdapterRemoval \\ --file1 ${reads[0]} \\ - --file2 ${reads[0]} \\ + --file2 ${reads[1]} \\ --collapse \\ $options.args \\ --basename $prefix \\ diff --git a/tests/modules/adapterremoval/test.yml b/tests/modules/adapterremoval/test.yml index 95cd4b04..318e7866 100644 --- a/tests/modules/adapterremoval/test.yml +++ b/tests/modules/adapterremoval/test.yml @@ -13,12 +13,12 @@ tags: - adapterremoval files: - - path: output/adapterremoval/test.pair2.trimmed.fastq.gz - md5sum: f076a9f666235e01a3281f8c46c9d010 - path: output/adapterremoval/test.log - md5sum: bea86105aff4d27fe29c83e24498fefa + md5sum: b8a451d3981b327f3fdb44f40ba2d6d1 - path: output/adapterremoval/test.pair1.trimmed.fastq.gz - md5sum: f076a9f666235e01a3281f8c46c9d010 + md5sum: 294a6277f0139bd597e57c6fa31f39c7 + - path: output/adapterremoval/test.pair2.trimmed.fastq.gz + md5sum: de7b38e2c881bced8671acb1ab452d78 - name: adapterremoval test_adapterremoval_paired_end_collapse command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c tests/config/nextflow.config @@ -26,6 +26,6 @@ - adapterremoval files: - path: output/adapterremoval/test.log - md5sum: 97cb97b3d03123ac88430768b2e36c59 + md5sum: 7f0b2328152226e46101a535cce718b3 - path: output/adapterremoval/test.merged.fastq.gz - md5sum: 50a4f9fdac6a24e211eb4dcf9f292bef + md5sum: 07a8f725bfd3ecbeabdc41b32d898dee From 64006e239a5e9fedd2224b54cd93bd796785173f Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 10 Nov 2021 16:26:34 +0100 Subject: [PATCH 221/314] fix: actually do the tests for multiple files (#1058) --- tests/modules/gatk4/markduplicates/main.nf | 6 +++--- tests/modules/gatk4/markduplicates/test.yml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/modules/gatk4/markduplicates/main.nf b/tests/modules/gatk4/markduplicates/main.nf index b9709dc0..f80c1bd5 100644 --- a/tests/modules/gatk4/markduplicates/main.nf +++ b/tests/modules/gatk4/markduplicates/main.nf @@ -14,9 +14,9 @@ workflow test_gatk4_markduplicates { workflow test_gatk4_markduplicates_multiple_bams { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) - ] + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + ] ] GATK4_MARKDUPLICATES ( input ) } diff --git a/tests/modules/gatk4/markduplicates/test.yml b/tests/modules/gatk4/markduplicates/test.yml index 99296ca4..66921e45 100644 --- a/tests/modules/gatk4/markduplicates/test.yml +++ b/tests/modules/gatk4/markduplicates/test.yml @@ -17,7 +17,7 @@ - gatk4 files: - path: output/gatk4/test.bai - md5sum: 93cebe29e7cca2064262b739235cca9b + md5sum: d12be29abba5865b7da0cd23f1a84e86 - path: output/gatk4/test.bam - md5sum: dcd6f584006b04141fb787001a8ecacc + md5sum: e988925ed850f8d9d966aa6689ae57de - path: output/gatk4/test.metrics From 56d5eb983463e74047acc2e1c81346715519c7fb Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Wed, 10 Nov 2021 17:40:07 +0100 Subject: [PATCH 222/314] Add Bacillus fragilis alignments to `test_data.config` (#1054) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. 
Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * split tests * export env variable * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip * add bacillus fragilis alignments Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- tests/config/test_data.config | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 6cb494f7..c34696f2 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -267,6 +267,12 @@ params { test1_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" test2_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" test2_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" + test1_paired_end_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.bam" + test1_paired_end_sorted_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.sorted.bam" + test1_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.sorted.bam.bai" + test2_paired_end_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.bam" + test2_paired_end_sorted_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.sorted.bam" + test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.sorted.bam.bai" } 'nanopore' { test_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" From 4bd530135fe2a78bdfec2d710b9d294fb447c245 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Wed, 10 Nov 2021 11:25:50 -0700 Subject: [PATCH 223/314] add ngmaster module (#1024) * add ngmaster module * add docker container Co-authored-by: Gregor Sturm --- modules/ngmaster/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/ngmaster/main.nf | 41 +++++++++++++++++ modules/ngmaster/meta.yml | 43 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ngmaster/main.nf | 13 ++++++ tests/modules/ngmaster/test.yml | 7 +++ 6 files changed, 186 insertions(+) create mode 100644 modules/ngmaster/functions.nf create mode 100644 modules/ngmaster/main.nf create mode 100644 modules/ngmaster/meta.yml create mode 100644 tests/modules/ngmaster/main.nf create mode 100644 tests/modules/ngmaster/test.yml diff --git a/modules/ngmaster/functions.nf b/modules/ngmaster/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ngmaster/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ngmaster/main.nf b/modules/ngmaster/main.nf new file mode 100644 index 00000000..1897b5f3 --- /dev/null +++ b/modules/ngmaster/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process NGMASTER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::ngmaster=0.5.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ngmaster:0.5.8--pyhdfd78af_1" + } else { + container "quay.io/biocontainers/ngmaster:0.5.8--pyhdfd78af_1" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + ngmaster \\ + $options.args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(ngmaster --version 2>&1) | sed 's/^.*ngmaster //' ) + END_VERSIONS + """ +} diff --git a/modules/ngmaster/meta.yml b/modules/ngmaster/meta.yml new file mode 100644 index 00000000..1dbb02a0 --- /dev/null +++ b/modules/ngmaster/meta.yml @@ -0,0 +1,43 @@ +name: ngmaster +description: Serotyping Neisseria gonorrhoeae assemblies +keywords: + - fasta + - Neisseria gonorrhoeae + - serotype +tools: + - ngmaster: + description: In silico multi-antigen sequence typing for Neisseria gonorrhoeae (NG-MAST) + homepage: https://github.com/MDU-PHL/ngmaster/blob/master/README.md + documentation: https://github.com/MDU-PHL/ngmaster/blob/master/README.md + tool_dev_url: https://github.com/MDU-PHL/ngmaster + doi: "10.1099/mgen.0.000076" + licence: ['GPL v3 only'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8dfe67cf..8c169fcd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -860,6 +860,10 @@ nextclade: - modules/nextclade/** - tests/modules/nextclade/** +ngmaster: + - modules/ngmaster/** + - tests/modules/ngmaster/** + optitype: - modules/optitype/** - tests/modules/optitype/** diff --git a/tests/modules/ngmaster/main.nf b/tests/modules/ngmaster/main.nf new file mode 100644 index 00000000..8bc975ed --- /dev/null +++ b/tests/modules/ngmaster/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NGMASTER } from '../../../modules/ngmaster/main.nf' addParams( options: [:] ) + +workflow test_ngmaster { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + NGMASTER ( input ) +} diff --git a/tests/modules/ngmaster/test.yml b/tests/modules/ngmaster/test.yml new file mode 100644 index 00000000..31584a54 --- /dev/null +++ b/tests/modules/ngmaster/test.yml @@ -0,0 +1,7 @@ +- name: ngmaster test_ngmaster + command: nextflow run tests/modules/ngmaster -entry test_ngmaster -c tests/config/nextflow.config + tags: + - ngmaster + files: + - path: output/ngmaster/test.tsv + md5sum: cf674474eaf8ac6abfcebce0af0226cf From 3b600af50eae8264960df817277cfe303d2acd47 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Thu, 11 Nov 2021 08:58:59 +0100 Subject: [PATCH 224/314] feat: update gatk4 from 4.2.0.0 to 4.2.3.0 (#1059) * feat: update gatk4 from 4.2.0.0 to 4.2.3.0 * update md5checksum * commit all files * actually checksum was good, but I suspect something fishy with the tests --- modules/gatk4/applybqsr/main.nf | 6 +++--- modules/gatk4/baserecalibrator/main.nf | 6 +++--- modules/gatk4/bedtointervallist/main.nf | 6 +++--- modules/gatk4/calculatecontamination/main.nf | 6 +++--- modules/gatk4/createsequencedictionary/main.nf | 6 +++--- modules/gatk4/createsomaticpanelofnormals/main.nf | 6 +++--- modules/gatk4/estimatelibrarycomplexity/main.nf | 6 +++--- modules/gatk4/fastqtosam/main.nf | 6 +++--- modules/gatk4/filtermutectcalls/main.nf | 6 +++--- modules/gatk4/genomicsdbimport/main.nf | 6 +++--- modules/gatk4/getpileupsummaries/main.nf | 6 +++--- modules/gatk4/haplotypecaller/main.nf | 6 +++--- modules/gatk4/intervallisttools/main.nf | 6 +++--- modules/gatk4/learnreadorientationmodel/main.nf | 6 +++--- modules/gatk4/markduplicates/main.nf | 6 +++--- modules/gatk4/mergebamalignment/main.nf | 6 +++--- modules/gatk4/mergevcfs/main.nf | 6 +++--- modules/gatk4/mutect2/main.nf | 6 +++--- modules/gatk4/revertsam/main.nf | 6 +++--- modules/gatk4/samtofastq/main.nf | 6 +++--- modules/gatk4/splitncigarreads/main.nf | 6 +++--- modules/gatk4/variantfiltration/main.nf | 6 +++--- tests/modules/gatk4/applybqsr/test.yml | 6 +++--- tests/modules/gatk4/calculatecontamination/test.yml | 8 ++++---- tests/modules/gatk4/createsomaticpanelofnormals/test.yml | 2 +- tests/modules/gatk4/fastqtosam/test.yml | 4 ++-- tests/modules/gatk4/filtermutectcalls/test.yml | 6 +++--- tests/modules/gatk4/getpileupsummaries/test.yml | 4 ++-- tests/modules/gatk4/markduplicates/test.yml | 6 +++--- tests/modules/gatk4/mergebamalignment/test.yml | 2 +- 
tests/modules/gatk4/mergevcfs/test.yml | 4 ++-- tests/modules/gatk4/revertsam/test.yml | 2 +- tests/modules/gatk4/splitncigarreads/test.yml | 2 +- tests/subworkflows/nf-core/gatk_create_som_pon/test.yml | 2 +- 34 files changed, 90 insertions(+), 90 deletions(-) diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index 508a29ca..e1a4d7b4 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -11,11 +11,11 @@ process GATK4_APPLYBQSR { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 85c30daf..ff9eb1f9 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -11,11 +11,11 @@ process GATK4_BASERECALIBRATOR { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 064247cc..7c06ccef 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -11,11 +11,11 @@ process GATK4_BEDTOINTERVALLIST { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index bfe9b8fd..28dd7ccf 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -11,11 +11,11 @@ process GATK4_CALCULATECONTAMINATION { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 12372bdf..db28e244 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -11,11 +11,11 @@ process GATK4_CREATESEQUENCEDICTIONARY { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 66dfda23..b3685171 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -11,11 +11,11 @@ process GATK4_CREATESOMATICPANELOFNORMALS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index 4cea7086..bfaeedbc 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -11,11 +11,11 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.2.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.2.0--hdfd78af_1" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.2.0--hdfd78af_1" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index ebd081ac..5879618d 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -11,11 +11,11 @@ process GATK4_FASTQTOSAM { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 5a784677..b54e07ed 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -11,11 +11,11 @@ process GATK4_FILTERMUTECTCALLS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index 78c6b81f..c5582563 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -11,11 +11,11 @@ process GATK4_GENOMICSDBIMPORT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 09449f12..7919678c 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -11,11 +11,11 @@ process GATK4_GETPILEUPSUMMARIES { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 4bddbb6d..1e540d17 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -11,11 +11,11 @@ process GATK4_HAPLOTYPECALLER { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 2f464919..5da651b9 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -11,11 +11,11 @@ process GATK4_INTERVALLISTTOOLS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--hdfd78af_1" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--hdfd78af_1" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index 0a499def..b8aee764 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -11,11 +11,11 @@ process GATK4_LEARNREADORIENTATIONMODEL { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index b1ff5222..e44f4bfc 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -11,11 +11,11 @@ process GATK4_MARKDUPLICATES { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 0c9fe5ee..9c5fe26c 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -11,11 +11,11 @@ process GATK4_MERGEBAMALIGNMENT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index ce9a52c3..28073fcb 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -11,11 +11,11 @@ process GATK4_MERGEVCFS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 7999eec3..748b1673 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -11,11 +11,11 @@ process GATK4_MUTECT2 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index b3c9085a..7b5ee696 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -11,11 +11,11 @@ process GATK4_REVERTSAM { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 324f3bae..843c61ce 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -11,11 +11,11 @@ process GATK4_SAMTOFASTQ { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 793cc671..01b1d05a 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -11,11 +11,11 @@ process GATK4_SPLITNCIGARREADS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index 28084645..a4e950ae 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -11,11 +11,11 @@ process GATK4_VARIANTFILTRATION { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/tests/modules/gatk4/applybqsr/test.yml b/tests/modules/gatk4/applybqsr/test.yml index ed89c6ff..02448b02 100644 --- a/tests/modules/gatk4/applybqsr/test.yml +++ b/tests/modules/gatk4/applybqsr/test.yml @@ -5,7 +5,7 @@ - gatk4 files: - path: output/gatk4/test.bam - md5sum: 87a2eabae2b7b41574f966612b5addae + md5sum: af56f5dd81b95070079d54670507f530 - name: gatk4 applybqsr test_gatk4_applybqsr_intervals command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c tests/config/nextflow.config @@ -14,7 +14,7 @@ - gatk4 files: - path: output/gatk4/test.bam - md5sum: 9c015d3c1dbd9eee793b7386f432b6aa + md5sum: 0cbfa4be143e988d56ce741b5077510e - name: gatk4 applybqsr test_gatk4_applybqsr_cram command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c tests/config/nextflow.config @@ -23,4 +23,4 @@ - gatk4 files: - path: output/gatk4/test.bam - md5sum: 02f84815fdbc99c21c8d42ebdcabbbf7 + md5sum: 720ef7453fc3c9def18bbe396062346c diff --git a/tests/modules/gatk4/calculatecontamination/test.yml b/tests/modules/gatk4/calculatecontamination/test.yml index 8736bc32..89d419e0 100644 --- a/tests/modules/gatk4/calculatecontamination/test.yml +++ b/tests/modules/gatk4/calculatecontamination/test.yml @@ -5,7 +5,7 @@ - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_matched_pair command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c tests/config/nextflow.config @@ -14,7 +14,7 @@ - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_segmentation command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c tests/config/nextflow.config @@ -23,6 +23,6 @@ - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: 
ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - path: output/gatk4/test.segmentation.table - md5sum: 478cb4f69ec001944b9cd0e7e4de01ef + md5sum: 91f28bfe4727a3256810927fc5eba92f diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml index d3e6c537..d71059ad 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml +++ b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml @@ -6,4 +6,4 @@ files: - path: output/gatk4/test.pon.vcf.gz - path: output/gatk4/test.pon.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 diff --git a/tests/modules/gatk4/fastqtosam/test.yml b/tests/modules/gatk4/fastqtosam/test.yml index f6597b66..b576075a 100644 --- a/tests/modules/gatk4/fastqtosam/test.yml +++ b/tests/modules/gatk4/fastqtosam/test.yml @@ -5,7 +5,7 @@ - gatk4 files: - path: output/gatk4/test.bam - md5sum: 4967100b2e4912c0e4ce0976d946bafb + md5sum: 0a0d308b219837977b8df9daa26db7de - name: gatk4 fastqtosam test_gatk4_fastqtosam_paired_end command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c tests/config/nextflow.config @@ -14,4 +14,4 @@ - gatk4/fastqtosam files: - path: output/gatk4/test.bam - md5sum: 4967100b2e4912c0e4ce0976d946bafb + md5sum: 0a0d308b219837977b8df9daa26db7de diff --git a/tests/modules/gatk4/filtermutectcalls/test.yml b/tests/modules/gatk4/filtermutectcalls/test.yml index b17a306c..d5b97d36 100644 --- a/tests/modules/gatk4/filtermutectcalls/test.yml +++ b/tests/modules/gatk4/filtermutectcalls/test.yml @@ -8,7 +8,7 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_with_files command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c tests/config/nextflow.config @@ -20,7 +20,7 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_use_val command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c tests/config/nextflow.config @@ -32,4 +32,4 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 diff --git a/tests/modules/gatk4/getpileupsummaries/test.yml b/tests/modules/gatk4/getpileupsummaries/test.yml index 88cca794..6c5e1f84 100644 --- a/tests/modules/gatk4/getpileupsummaries/test.yml +++ b/tests/modules/gatk4/getpileupsummaries/test.yml @@ -5,7 +5,7 @@ - gatk4/getpileupsummaries files: - path: output/gatk4/test.pileups.table - md5sum: 00f92a8f7282d6129f1aca04e2c7d968 + md5sum: 0d19674bef2ff0700d5b02b3463dd210 - name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_separate_sites command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites -c tests/config/nextflow.config @@ -14,4 +14,4 @@ - gatk4/getpileupsummaries files: - path: output/gatk4/test.pileups.table - 
md5sum: 00f92a8f7282d6129f1aca04e2c7d968 + md5sum: 0d19674bef2ff0700d5b02b3463dd210 diff --git a/tests/modules/gatk4/markduplicates/test.yml b/tests/modules/gatk4/markduplicates/test.yml index 66921e45..f4345bc4 100644 --- a/tests/modules/gatk4/markduplicates/test.yml +++ b/tests/modules/gatk4/markduplicates/test.yml @@ -7,7 +7,7 @@ - path: output/gatk4/test.bai md5sum: e9c125e82553209933883b4fe2b8d7c2 - path: output/gatk4/test.bam - md5sum: bda9a7bf5057f2288ed70be3eb8a753f + md5sum: f94271007c1ec8e56adfdd8e45a07bd0 - path: output/gatk4/test.metrics - name: gatk4 markduplicates test_gatk4_markduplicates_multiple_bams @@ -17,7 +17,7 @@ - gatk4 files: - path: output/gatk4/test.bai - md5sum: d12be29abba5865b7da0cd23f1a84e86 + md5sum: bad71df9c876e72a5bc0a3e0fd755f92 - path: output/gatk4/test.bam - md5sum: e988925ed850f8d9d966aa6689ae57de + md5sum: e0462bd4fe2cf4beda71e1bd2c66235b - path: output/gatk4/test.metrics diff --git a/tests/modules/gatk4/mergebamalignment/test.yml b/tests/modules/gatk4/mergebamalignment/test.yml index 190a9391..4fb98e3d 100644 --- a/tests/modules/gatk4/mergebamalignment/test.yml +++ b/tests/modules/gatk4/mergebamalignment/test.yml @@ -5,4 +5,4 @@ - gatk4/mergebamalignment files: - path: output/gatk4/test.bam - md5sum: bd4a5e2ea916826aadebb5878333e26f + md5sum: e6f1b343700b7ccb94e81ae127433988 diff --git a/tests/modules/gatk4/mergevcfs/test.yml b/tests/modules/gatk4/mergevcfs/test.yml index 4458f969..884738b0 100644 --- a/tests/modules/gatk4/mergevcfs/test.yml +++ b/tests/modules/gatk4/mergevcfs/test.yml @@ -5,7 +5,7 @@ - gatk4 files: - path: output/gatk4/test.vcf.gz - md5sum: ff48f175e26db2d4b2957762f6d1c715 + md5sum: 5b289bda88d3a3504f2e19ee8cff177c - name: gatk4 mergevcfs test_gatk4_mergevcfs_refdict command: nextflow run tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs_refdict -c tests/config/nextflow.config @@ -14,4 +14,4 @@ - gatk4 files: - path: output/gatk4/test.vcf.gz - md5sum: ff48f175e26db2d4b2957762f6d1c715 + md5sum: 5b289bda88d3a3504f2e19ee8cff177c diff --git a/tests/modules/gatk4/revertsam/test.yml b/tests/modules/gatk4/revertsam/test.yml index 3f0969c4..c65d3666 100644 --- a/tests/modules/gatk4/revertsam/test.yml +++ b/tests/modules/gatk4/revertsam/test.yml @@ -5,4 +5,4 @@ - gatk4/revertsam files: - path: output/gatk4/test.reverted.bam - md5sum: f778310b18b83b49929eb648594f96dc + md5sum: f783a88deb45c3a2c20ca12cbe1c5652 diff --git a/tests/modules/gatk4/splitncigarreads/test.yml b/tests/modules/gatk4/splitncigarreads/test.yml index d6827db9..146cd329 100644 --- a/tests/modules/gatk4/splitncigarreads/test.yml +++ b/tests/modules/gatk4/splitncigarreads/test.yml @@ -5,4 +5,4 @@ - gatk4/splitncigarreads files: - path: output/gatk4/test.bam - md5sum: 8d05a41f9467e62d3fc1bc725f0869ec + md5sum: bfe6d04a4072f97fdb97dbc502c9d3e2 diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml index 7c9e7ac0..a4478044 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -31,7 +31,7 @@ contains: - "FORMAT= Date: Thu, 11 Nov 2021 14:19:26 +0000 Subject: [PATCH 225/314] Update module: `pbccs` (#1018) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: First commit of pbccs module * 👌 IMPROVE: Remove option from command + rename output (ccs -> bam) * 👌 IMPROVE: Move .pbi output into report channel * 🐛FIX: Correct code after --rq option removal from command line module - 
module main.nf: Remove ramaining rq input channel - Test main.nf: Transfert rq into addParams - Test test.yml: Update md5sums * 🐛FIX: Repair additionnal option usage * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: CCS is run in parallel with --chunk option * 👌 IMPROVE: Add Pbindex in bam ouput channel * 👌 IMPROVE: Change label to process_low * 👌 IMPROVE: Define reports files names + add json version of txt report * 🐛 FIX: Add missing backslashes * 🐛 FIX: Add missing gz extension * 🐛 FIX: update ouput channel * 🐛 FIX: output file name * 👌 IMPROVE: .gitignore * 👌 IMPROVE: Update function.nf to last version * 👌 IMPROVE: Update saveAs in main.nf * 👌 IMPROVE: Add pbccs module * 🐛 FIX: Fix Broken test * 👌 IMPROVE: Update test_data.config * 🐛 FIX: Fix test * 👌 IMPROVE: Update path of test dataset files * 👌 IMPROVE: Remove useless index + Fix Typos * 📦 NEW: First commit of pbccs module * 👌 IMPROVE: Remove option from command + rename output (ccs -> bam) * 👌 IMPROVE: Move .pbi output into report channel * 🐛FIX: Correct code after --rq option removal from command line module - module main.nf: Remove ramaining rq input channel - Test main.nf: Transfert rq into addParams - Test test.yml: Update md5sums * 🐛FIX: Repair additionnal option usage * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: CCS is run in parallel with --chunk option * 👌 IMPROVE: Add Pbindex in bam ouput channel * 👌 IMPROVE: Change label to process_low * 👌 IMPROVE: Define reports files names + add json version of txt report * 🐛 FIX: Add missing backslashes * 🐛 FIX: Add missing gz extension * 🐛 FIX: update ouput channel * 🐛 FIX: output file name * 👌 IMPROVE: .gitignore * 👌 IMPROVE: Update function.nf to last version * 👌 IMPROVE: Update saveAs in main.nf * 👌 IMPROVE: Add pbccs module * 🐛 FIX: Fix Broken test * 👌 IMPROVE: Update test_data.config * 🐛 FIX: Fix test * 👌 IMPROVE: Update path of test dataset files * 👌 IMPROVE: Remove useless index + Fix Typos * 🐛 FIX: fill contains args * 👌 IMPROVE: One output => One Channel * 👌 IMPROVE: One input => One channel * 🐛 FIX: Update tests * 🐛 FIX: Remove TODOs from test.yaml * 👌 IMPROVE: Revert and keep bam and pbi together * 🐛 FIX: Remove old rq input from meta.yml * 👌 IMPROVE: Update test to match input channels * 👌 IMPROVE: use prefix for for output file name * 👌 IMPROVE: Update to new versions.yml * 👌 IMPROVE: Update pbccs from v6.0.0 to v6.0.2 * 👌 IMPROVE: Keep track of the former sample id in meta * Update modules/pbccs/main.nf Co-authored-by: Harshil Patel * 👌 IMPROVE: remove former_id from meta * 👌 IMPROVE: Use chunk number in output filename * 🐛 FIX: Update meta.yml * 🐛 FIX: Update reports filenames with chunk number. Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/pbccs/main.nf | 6 ++--- modules/pbccs/meta.yml | 22 +++++++++++++++--- ...t_versions_yml.cpython-39-pytest-6.2.5.pyc | Bin 0 -> 3558 bytes tests/modules/pbccs/test.yml | 10 ++++---- 4 files changed, 27 insertions(+), 11 deletions(-) create mode 100644 tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 7e70ac14..55eacd76 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -37,9 +37,9 @@ process PBCCS { ccs \\ $bam \\ ${prefix}.chunk${chunk_num}.bam \\ - --report-file ${prefix}.report.txt \\ - --report-json ${prefix}.report.json \\ - --metrics-json ${prefix}.metrics.json.gz \\ + --report-file ${prefix}.chunk${chunk_num}.report.txt \\ + --report-json ${prefix}.chunk${chunk_num}.report.json \\ + --metrics-json ${prefix}.chunk${chunk_num}.metrics.json.gz \\ --chunk $chunk_num/$chunk_on \\ -j $task.cpus \\ $options.args diff --git a/modules/pbccs/meta.yml b/modules/pbccs/meta.yml index 38f31496..f55c0d71 100644 --- a/modules/pbccs/meta.yml +++ b/modules/pbccs/meta.yml @@ -42,10 +42,26 @@ output: type: file description: File containing software versions pattern: "versions.yml" - - css: + - bam: type: file - description: Consensus sequences - pattern: "*.ccs.bam" + description: CCS sequences in bam format + pattern: "*.bam" + - pbi: + type: file + description: PacBio Index of CCS sequences + pattern: "*.pbi" + - report_txt: + type: file + description: Summary of CCS in txt format + pattern: ".txt" + - report_json: + type: file + description: Summary of CCS in txt json + pattern: ".json" + - metrics: + type: file + description: Metrics about zmws + pattern: "*.json.gz" authors: - "@sguizard" diff --git a/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc b/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33acb8369a1bc62b5e66e1ed80e2247dd0e2759f GIT binary patch literal 3558 [base85 binary payload omitted]
Date: Thu, 11 Nov 2021 14:57:55 +0000 Subject: [PATCH 226/314] Update: `gstama/collapse` (#1057) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add gd-tama module * 🐛 FIX (TEMP): Update singularity container address * 📦 NEW: Add bamtools module * 📦 NEW: Rewrite and rename module (gstama => gstama/collapse) * 👌 IMPROVE: ignore test data * 👌 IMPROVE: Remove junk files * 👌 IMPROVE: Update output * 👌 IMPROVE: Add channel for publishing tama's metadata outputs * 👌 IMPROVE: Update process label * 🐛 FIX: Use depot.galxyproject.org url for singularity * 👌 IMPROVE: autoselect running mode * 🐛 FIX: correct gstama collapse bash test * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update tama package and label * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update test * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add gd-tama module * 🐛 FIX (TEMP): Update singularity container address * 📦 NEW: Add bamtools module * 📦 NEW: Rewrite and rename module (gstama => gstama/collapse) * 👌 IMPROVE: ignore test data * 👌 IMPROVE: Update output * 👌 IMPROVE: Add channel for publishing tama's metadata outputs * 👌 IMPROVE: Update process label * 🐛 FIX: Use depot.galxyproject.org url for singularity * 👌 IMPROVE: autoselect running mode * 🐛 FIX: correct gstama collapse bash test * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update tama package and label * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update test * 👌 IMPROVE: delete unnecessary files * 👌 IMPROVE: Update + clean - Remove unnecessary files - Update to new versions.yml file - Better output channels * 👌 IMPROVE: Update meta.yml and output channels * 👌 IMPROVE: Remove useless files * 👌 IMPROVE: Remove automatic MODE setup * 👌 IMPROVE: Applied @jfy133 code modification suggestions * Update modules/gstama/collapse/meta.yml Co-authored-by: James A. Fellows Yates * 🐛 FIX: Add missing fasta option in meta.yml * 🐛 FIX: Fix typo * 🐛 FIX: Update package version * Update main.nf * Update meta.yml * Update modules/gstama/collapse/meta.yml * Apply suggestions from code review * Update tests/modules/gstama/collapse/main.nf * Update main.nf * 👌 IMPROVE: Update to gs-tama 1.0.3 * Update modules/gstama/collapse/main.nf Co-authored-by: James A. Fellows Yates Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/gstama/collapse/main.nf | 9 +++++---- tests/modules/gstama/collapse/test.yml | 4 +++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/modules/gstama/collapse/main.nf b/modules/gstama/collapse/main.nf index d4167b5e..8fc7877f 100644 --- a/modules/gstama/collapse/main.nf +++ b/modules/gstama/collapse/main.nf @@ -11,11 +11,12 @@ process GSTAMA_COLLAPSE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gs-tama=1.0.2" : null) + conda (params.enable_conda ? "bioconda::gs-tama=1.0.3" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0" } else { - container "quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0" + container "quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0" + } input: @@ -23,7 +24,7 @@ process GSTAMA_COLLAPSE { path fasta output: - tuple val(meta), path("*.bed") , emit: bed + tuple val(meta), path("*_collapsed.bed") , emit: bed tuple val(meta), path("*_trans_read.bed") , emit: bed_trans_reads tuple val(meta), path("*_local_density_error.txt"), emit: local_density_error tuple val(meta), path("*_polya.txt") , emit: polya diff --git a/tests/modules/gstama/collapse/test.yml b/tests/modules/gstama/collapse/test.yml index 98de6bb3..3815a156 100644 --- a/tests/modules/gstama/collapse/test.yml +++ b/tests/modules/gstama/collapse/test.yml @@ -4,7 +4,7 @@ - gstama - gstama/collapse files: - - path: output/gstama/test_tc.bed + - path: output/gstama/test_tc_collapsed.bed md5sum: e5105198ed970a33ae0ecaa7bff421d9 - path: output/gstama/test_tc_local_density_error.txt md5sum: b917ac1f14eccd590b6881a686f324d5 @@ -18,5 +18,7 @@ md5sum: 0ca1a32f33ef05242d897d913802554b - path: output/gstama/test_tc_trans_report.txt md5sum: 33a86c15ca2acce36b2a5962f4c1adc4 + - path: output/gstama/test_tc_varcov.txt + md5sum: 587fd899ff658eb66b1770a35283bfcb - path: output/gstama/test_tc_variants.txt md5sum: 5b1165e9f33faba4f7207013fc27257e From 94851901d548ee879d94ab4f4a2c2496bab04715 Mon Sep 17 00:00:00 2001 From: alexandregilardet <63741852+alexandregilardet@users.noreply.github.com> Date: Sat, 13 Nov 2021 15:09:14 +0000 Subject: [PATCH 227/314] add new module pmdtools/filter #847 (#963) * commit but won't be used because pmdtools should have a submodule * added submodule pmdtools/filter * removed pmdtools module created before deciding to design two submodules * oops forgot to remove a TODO * removed white space meta.yml, removed v in version and manually added submodule /filter to test * Update pytest_modules.yml * Update main.nf added split_cpus for multi-tools module resources * Update test.yml added .pmd extension to match modules/ main.nf * Update test.yml update md5sum * Update singularity and docker build in main.nf From build 4 to 5 in order to match the conda one * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/meta.yml Co-authored-by: James A. 
Fellows Yates * Update modules/pmdtools/filter/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update main.nf adding samtools version we need both pmdtools and samtools versions * Update main.nf remove .pmd extension * Update test.yml md5sum Because file extension changed Co-authored-by: James A. Fellows Yates --- modules/pmdtools/filter/functions.nf | 78 ++++++++++++++++++++++++++ modules/pmdtools/filter/main.nf | 60 ++++++++++++++++++++ modules/pmdtools/filter/meta.yml | 55 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/pmdtools/filter/main.nf | 15 +++++ tests/modules/pmdtools/filter/test.yml | 8 +++ 6 files changed, 220 insertions(+) create mode 100644 modules/pmdtools/filter/functions.nf create mode 100644 modules/pmdtools/filter/main.nf create mode 100644 modules/pmdtools/filter/meta.yml create mode 100644 tests/modules/pmdtools/filter/main.nf create mode 100644 tests/modules/pmdtools/filter/test.yml diff --git a/modules/pmdtools/filter/functions.nf b/modules/pmdtools/filter/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/pmdtools/filter/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/pmdtools/filter/main.nf b/modules/pmdtools/filter/main.nf new file mode 100644 index 00000000..3e363a9c --- /dev/null +++ b/modules/pmdtools/filter/main.nf @@ -0,0 +1,60 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PMDTOOLS_FILTER { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::pmdtools=0.60" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/pmdtools:0.60--hdfd78af_5" + } else { + container "quay.io/biocontainers/pmdtools:0.60--hdfd78af_5" + } + + input: + tuple val(meta), path(bam), path (bai) + val(threshold) + path(reference) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def split_cpus = Math.floor(task.cpus/2) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + if ("$bam" == "${prefix}.bam") error "[pmdtools/filter] Input and output names are the same, use the suffix option to disambiguate!" + //threshold and header flags activate filtering function of pmdtools + """ + samtools \\ + calmd \\ + $bam \\ + $reference \\ + $options.args \\ + -@ ${split_cpus} \\ + | pmdtools \\ + --threshold $threshold \\ + --header \\ + $options.args2 \\ + | samtools \\ + view \\ + $options.args3 \\ + -Sb \\ + - \\ + -@ ${split_cpus} \\ + -o ${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + pmdtools: \$( pmdtools --version | cut -f2 -d ' ' | sed 's/v//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/pmdtools/filter/meta.yml b/modules/pmdtools/filter/meta.yml new file mode 100644 index 00000000..72abbfdc --- /dev/null +++ b/modules/pmdtools/filter/meta.yml @@ -0,0 +1,55 @@ +name: pmdtools_filter +description: pmdtools command to filter ancient DNA molecules from others +keywords: + - pmdtools + - aDNA + - filter + - damage +tools: + - pmdtools: + description: Compute postmortem damage patterns and decontaminate ancient genomes + homepage: https://github.com/pontussk/PMDtools + documentation: https://github.com/pontussk/PMDtools + tool_dev_url: https://github.com/pontussk/PMDtools + doi: "10.1073/pnas.1318934111" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bai" + - threshold: + type: value + description: Post-mortem damage score threshold + - reference: + type: file + description: FASTA file + pattern: "*.{fa,fasta}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Filtered BAM file + pattern: "*.bam" + +authors: + - "@alexandregilardet" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8c169fcd..22a3edf5 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -956,6 +956,10 @@ plink/vcf: - modules/plink/vcf/** - tests/modules/plink/vcf/** +pmdtools/filter: + - modules/pmdtools/filter/** + - tests/modules/pmdtools/filter/** + porechop: - modules/porechop/** - tests/modules/porechop/** diff --git a/tests/modules/pmdtools/filter/main.nf b/tests/modules/pmdtools/filter/main.nf new file mode 100644 index 00000000..c4832bbb --- /dev/null +++ b/tests/modules/pmdtools/filter/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PMDTOOLS_FILTER } from '../../../../modules/pmdtools/filter/main.nf' addParams( options: [:] ) + +workflow test_pmdtools_filter { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]] + threshold = 3 + reference = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ] + PMDTOOLS_FILTER ( input, threshold, reference ) +} diff --git a/tests/modules/pmdtools/filter/test.yml b/tests/modules/pmdtools/filter/test.yml new file mode 100644 index 00000000..9171b02e --- /dev/null +++ b/tests/modules/pmdtools/filter/test.yml @@ -0,0 +1,8 @@ +- name: pmdtools filter + command: nextflow run ./tests/modules/pmdtools/filter -entry test_pmdtools_filter -c tests/config/nextflow.config + tags: + - pmdtools + - pmdtools/filter + files: + - path: output/pmdtools/test.bam + md5sum: 0fa64cb87d0439d4482938a4b6990b9d From 9475960928a3ba49624b49ef2b48438a4696ed0f Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 06:26:01 -0500 Subject: [PATCH 228/314] Bwa index (#1040) * fix a bug that the prefix is not handled by index. 
* build the test.yml Co-authored-by: Harshil Patel --- modules/bwa/index/main.nf | 4 ++-- tests/modules/bwa/index/test.yml | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/modules/bwa/index/main.nf b/modules/bwa/index/main.nf index 479431ed..db1911cb 100644 --- a/modules/bwa/index/main.nf +++ b/modules/bwa/index/main.nf @@ -31,8 +31,8 @@ process BWA_INDEX { bwa \\ index \\ $options.args \\ - $fasta \\ - -p bwa/${fasta.baseName} + -p bwa/${fasta.baseName} \\ + $fasta cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/tests/modules/bwa/index/test.yml b/tests/modules/bwa/index/test.yml index cdcb5e53..3fe8663d 100644 --- a/tests/modules/bwa/index/test.yml +++ b/tests/modules/bwa/index/test.yml @@ -1,16 +1,16 @@ -- name: bwa index - command: nextflow run ./tests/modules/bwa/index -entry test_bwa_index -c tests/config/nextflow.config +- name: bwa index test_bwa_index + command: nextflow run tests/modules/bwa/index -entry test_bwa_index -c tests/config/nextflow.config tags: - bwa - bwa/index files: - - path: ./output/bwa/bwa/genome.bwt - md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/bwa/bwa/genome.amb + - path: output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/bwa/bwa/genome.sa - md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - - path: ./output/bwa/bwa/genome.pac - md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/bwa/bwa/genome.ann + - path: output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/bwa/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 From 171a2a2dbf6008cf00b0aac6b981f182aba968c7 Mon Sep 17 00:00:00 2001 From: Daniel Straub <42973691+d4straub@users.noreply.github.com> Date: Mon, 15 Nov 2021 12:48:56 +0100 Subject: [PATCH 229/314] Unicycler with long read input (#1041) * Unicycler with long read input * tests and md5sums * remove unstable md5sums * Update modules/unicycler/main.nf Co-authored-by: Harshil Patel --- modules/unicycler/main.nf | 18 +++++++++++------- modules/unicycler/meta.yml | 13 +++++++++---- tests/modules/unicycler/main.nf | 18 +++++++++++++++--- tests/modules/unicycler/test.yml | 27 +++++++++++++++++++-------- 4 files changed, 54 insertions(+), 22 deletions(-) diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 2f7c49d6..3629d730 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -19,26 +19,30 @@ process UNICYCLER { } input: - tuple val(meta), path(reads) + tuple val(meta), path(shortreads), path(longreads) output: - tuple val(meta), path('*.scaffolds.fa'), emit: scaffolds - tuple val(meta), path('*.assembly.gfa'), emit: gfa - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path('*.scaffolds.fa.gz'), emit: scaffolds + tuple val(meta), path('*.assembly.gfa.gz'), emit: gfa + tuple val(meta), path('*.log') , emit: log + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def input_reads = meta.single_end ? "-s $reads" : "-1 ${reads[0]} -2 ${reads[1]}" + def short_reads = shortreads ? ( meta.single_end ? "-s $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" ) : "" + def long_reads = longreads ? 
"-l $longreads" : "" """ unicycler \\ --threads $task.cpus \\ $options.args \\ - $input_reads \\ + $short_reads \\ + $long_reads \\ --out ./ mv assembly.fasta ${prefix}.scaffolds.fa + gzip -n ${prefix}.scaffolds.fa mv assembly.gfa ${prefix}.assembly.gfa + gzip -n ${prefix}.assembly.gfa mv unicycler.log ${prefix}.unicycler.log cat <<-END_VERSIONS > versions.yml diff --git a/modules/unicycler/meta.yml b/modules/unicycler/meta.yml index e3b1aab9..b04ac882 100644 --- a/modules/unicycler/meta.yml +++ b/modules/unicycler/meta.yml @@ -19,11 +19,15 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - reads: + - shortreads: type: file description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, + List of input Illumina FastQ files of size 1 and 2 for single-end and paired-end data, respectively. + - longreads: + type: file + description: | + List of input FastQ files of size 1, PacBio or Nanopore long reads. output: - meta: type: map @@ -37,11 +41,11 @@ output: - scaffolds: type: file description: Fasta file containing scaffolds - pattern: "*.{scaffolds.fa}" + pattern: "*.{scaffolds.fa.gz}" - gfa: type: file description: gfa file containing assembly - pattern: "*.{assembly.gfa}" + pattern: "*.{assembly.gfa.gz}" - log: type: file description: unicycler log file @@ -53,3 +57,4 @@ output: authors: - "@JoseEspinosa" - "@drpatelh" + - "@d4straub" diff --git a/tests/modules/unicycler/main.nf b/tests/modules/unicycler/main.nf index 993310a1..5352fc8b 100644 --- a/tests/modules/unicycler/main.nf +++ b/tests/modules/unicycler/main.nf @@ -6,7 +6,8 @@ include { UNICYCLER } from '../../../modules/unicycler/main.nf' addParams( optio workflow test_unicycler_single_end { input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true) ], + [] ] UNICYCLER ( input ) @@ -14,8 +15,19 @@ workflow test_unicycler_single_end { workflow test_unicycler_paired_end { input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], + [] + ] + + UNICYCLER ( input ) +} + +workflow test_unicycler_shortreads_longreads { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], + [ file(params.test_data['bacteroides_fragilis']['nanopore']['test_fastq_gz'], checkIfExists: true) ] ] UNICYCLER ( input ) diff --git a/tests/modules/unicycler/test.yml b/tests/modules/unicycler/test.yml index f12cc1ba..124ac3e2 100644 --- a/tests/modules/unicycler/test.yml +++ b/tests/modules/unicycler/test.yml @@ -1,21 +1,32 @@ -- name: unicycler single-end - command: nextflow run ./tests/modules/unicycler -entry test_unicycler_single_end -c tests/config/nextflow.config +- name: unicycler test_unicycler_single_end + command: nextflow run tests/modules/unicycler -entry test_unicycler_single_end -c 
tests/config/nextflow.config tags: - unicycler files: - - path: output/unicycler/test.scaffolds.fa - - path: output/unicycler/test.assembly.gfa + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz - path: output/unicycler/test.unicycler.log contains: - "Assembly complete" -- name: unicycler paired-end - command: nextflow run ./tests/modules/unicycler -entry test_unicycler_paired_end -c tests/config/nextflow.config +- name: unicycler test_unicycler_paired_end + command: nextflow run tests/modules/unicycler -entry test_unicycler_paired_end -c tests/config/nextflow.config tags: - unicycler files: - - path: output/unicycler/test.scaffolds.fa - - path: output/unicycler/test.assembly.gfa + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz + - path: output/unicycler/test.unicycler.log + contains: + - "Assembly complete" + +- name: unicycler test_unicycler_shortreads_longreads + command: nextflow run tests/modules/unicycler -entry test_unicycler_shortreads_longreads -c tests/config/nextflow.config + tags: + - unicycler + files: + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz - path: output/unicycler/test.unicycler.log contains: - "Assembly complete" From 661bdb645eb6dfef3dc3dd1a998bab5ce6f0706e Mon Sep 17 00:00:00 2001 From: Daniel Straub <42973691+d4straub@users.noreply.github.com> Date: Mon, 15 Nov 2021 12:53:07 +0100 Subject: [PATCH 230/314] Change Spades input (#1039) * Change spades module * correct meta map description * adjust memory handling * remove trailing whitespace * fix hmm input * Update modules/spades/main.nf Co-authored-by: Harshil Patel --- modules/spades/main.nf | 35 +++++++++++++-------- modules/spades/meta.yml | 22 ++++++++++++-- tests/modules/spades/main.nf | 33 ++++++++++++++++++-- tests/modules/spades/test.yml | 57 ++++++++++++++++++++++++++--------- 4 files changed, 115 insertions(+), 32 deletions(-) diff --git a/modules/spades/main.nf b/modules/spades/main.nf index c21066e2..836efbda 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -19,46 +19,57 @@ process SPADES { } input: - tuple val(meta), path(reads) + tuple val(meta), path(illumina), path(pacbio), path(nanopore) path hmm output: - tuple val(meta), path('*.scaffolds.fa') , optional:true, emit: scaffolds - tuple val(meta), path('*.contigs.fa') , optional:true, emit: contigs - tuple val(meta), path('*.transcripts.fa') , optional:true, emit: transcripts - tuple val(meta), path('*.gene_clusters.fa'), optional:true, emit: gene_clusters - tuple val(meta), path('*.assembly.gfa') , optional:true, emit: gfa - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path('*.scaffolds.fa.gz') , optional:true, emit: scaffolds + tuple val(meta), path('*.contigs.fa.gz') , optional:true, emit: contigs + tuple val(meta), path('*.transcripts.fa.gz') , optional:true, emit: transcripts + tuple val(meta), path('*.gene_clusters.fa.gz'), optional:true, emit: gene_clusters + tuple val(meta), path('*.assembly.gfa.gz') , optional:true, emit: gfa + tuple val(meta), path('*.log') , emit: log + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def input_reads = meta.single_end ? "-s $reads" : "-1 ${reads[0]} -2 ${reads[1]}" - def custom_hmms = params.spades_hmm ? "--custom-hmms $hmm" : "" + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def maxmem = task.memory.toGiga() + def illumina_reads = illumina ? ( meta.single_end ? "-s $illumina" : "-1 ${illumina[0]} -2 ${illumina[1]}" ) : "" + def pacbio_reads = pacbio ? "--pacbio $pacbio" : "" + def nanopore_reads = nanopore ? "--nanopore $nanopore" : "" + def custom_hmms = hmm ? "--custom-hmms $hmm" : "" """ spades.py \\ $options.args \\ --threads $task.cpus \\ + --memory $maxmem \\ $custom_hmms \\ - $input_reads \\ + $illumina_reads \\ + $pacbio_reads \\ + $nanopore_reads \\ -o ./ mv spades.log ${prefix}.spades.log if [ -f scaffolds.fasta ]; then mv scaffolds.fasta ${prefix}.scaffolds.fa + gzip -n ${prefix}.scaffolds.fa fi if [ -f contigs.fasta ]; then mv contigs.fasta ${prefix}.contigs.fa + gzip -n ${prefix}.contigs.fa fi if [ -f transcripts.fasta ]; then mv transcripts.fasta ${prefix}.transcripts.fa + gzip -n ${prefix}.transcripts.fa fi if [ -f assembly_graph_with_scaffolds.gfa ]; then mv assembly_graph_with_scaffolds.gfa ${prefix}.assembly.gfa + gzip -n ${prefix}.assembly.gfa fi if [ -f gene_clusters.fasta ]; then mv gene_clusters.fasta ${prefix}.gene_clusters.fa + gzip -n ${prefix}.gene_clusters.fa fi cat <<-END_VERSIONS > versions.yml diff --git a/modules/spades/meta.yml b/modules/spades/meta.yml index 3d5943ae..b6878d3d 100644 --- a/modules/spades/meta.yml +++ b/modules/spades/meta.yml @@ -20,11 +20,20 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - reads: + - illumina: type: file description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, - respectively. + List of input FastQ (Illumina or PacBio CCS reads) files + of size 1 and 2 for single-end and paired-end data, + respectively. This input data type is required. + - pacbio: + type: file + description: | + List of input PacBio CLR FastQ files of size 1. + - nanopore: + type: file + description: | + List of input FastQ files of size 1, originating from Oxford Nanopore technology. 
- hmm: type: file description: @@ -39,26 +48,32 @@ output: type: file description: | Fasta file containing scaffolds + pattern: "*.fa.gz" - contigs: type: file description: | Fasta file containing contigs + pattern: "*.fa.gz" - transcripts: type: file description: | Fasta file containing transcripts + pattern: "*.fa.gz" - gene_clusters: type: file description: | Fasta file containing gene_clusters + pattern: "*.fa.gz" - gfa: type: file description: | gfa file containing assembly + pattern: "*.gfa.gz" - log: type: file description: | Spades log file + pattern: "*.log" - versions: type: file description: File containing software versions @@ -67,3 +82,4 @@ output: authors: - "@JoseEspinosa" - "@drpatelh" + - "@d4straub" diff --git a/tests/modules/spades/main.nf b/tests/modules/spades/main.nf index a8518a0e..b09a4266 100644 --- a/tests/modules/spades/main.nf +++ b/tests/modules/spades/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { SPADES } from '../../../modules/spades/main.nf' addParams( spades_hmm: false ,options: ['args': '--rnaviral'] ) +include { SPADES } from '../../../modules/spades/main.nf' addParams( options: ['args': '--rnaviral'] ) workflow test_spades_single_end { input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [] ] SPADES ( input, [] ) } @@ -14,7 +16,32 @@ workflow test_spades_single_end { workflow test_spades_paired_end { input = [ [ id:'test', single_end:false ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [] + ] + + SPADES ( input, [] ) +} + +workflow test_spades_illumina_nanopore { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + ] + + SPADES ( input, [] ) +} + +// that isnt perfect, because CCS reads should rather be used with -s instead of --pacbio +workflow test_spades_illumina_pacbio { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['pacbio']['ccs_fq_gz'], checkIfExists: true) ], + [] ] SPADES ( input, [] ) diff --git a/tests/modules/spades/test.yml b/tests/modules/spades/test.yml index 35beb1a7..a400e79d 100644 --- a/tests/modules/spades/test.yml +++ b/tests/modules/spades/test.yml @@ -1,23 +1,52 @@ -- name: spades single end - command: nextflow run ./tests/modules/spades -entry test_spades_single_end -c tests/config/nextflow.config +- name: spades test_spades_single_end + command: nextflow run tests/modules/spades -entry test_spades_single_end -c tests/config/nextflow.config tags: - spades files: - - path: output/spades/test.assembly.gfa - md5sum: a995d1d413031534180d2b3b715fa921 - - path: output/spades/test.contigs.fa - md5sum: 65ba6a517c152dbe219bf4b5b92bdad7 - - path: 
output/spades/test.scaffolds.fa - md5sum: 65ba6a517c152dbe219bf4b5b92bdad7 + - path: output/spades/test.assembly.gfa.gz + md5sum: e5eab229363a906954a07df00e2495a6 + - path: output/spades/test.contigs.fa.gz + md5sum: 64f6b339872b934138c6efd6baa445f4 + - path: output/spades/test.scaffolds.fa.gz + md5sum: 64f6b339872b934138c6efd6baa445f4 - path: output/spades/test.spades.log -- name: spades paired end - command: nextflow run ./tests/modules/spades -entry test_spades_paired_end -c tests/config/nextflow.config +- name: spades test_spades_paired_end + command: nextflow run tests/modules/spades -entry test_spades_paired_end -c tests/config/nextflow.config tags: - spades files: - - path: output/spades/test.assembly.gfa - md5sum: bb053ef4e9250829c980ca17fbdbe3e9 - - path: output/spades/test.contigs.fa - md5sum: 4476d409da70d9f7fc2aa8f25bbaf7fd + - path: output/spades/test.assembly.gfa.gz + md5sum: c8614fb69907ae832a1359a054af240f + - path: output/spades/test.contigs.fa.gz + md5sum: eab5165b3cda96c235aaa1388010cb27 - path: output/spades/test.spades.log + - path: output/spades/warnings.log + +- name: spades test_spades_illumina_nanopore + command: nextflow run tests/modules/spades -entry test_spades_illumina_nanopore -c tests/config/nextflow.config + tags: + - spades + files: + - path: output/spades/test.assembly.gfa.gz + md5sum: e438534f14e107f005efdd659adeba6a + - path: output/spades/test.contigs.fa.gz + md5sum: 027b0e54bfd8f4bc359e751e094133ef + - path: output/spades/test.scaffolds.fa.gz + md5sum: 027b0e54bfd8f4bc359e751e094133ef + - path: output/spades/test.spades.log + - path: output/spades/warnings.log + +- name: spades test_spades_illumina_pacbio + command: nextflow run tests/modules/spades -entry test_spades_illumina_pacbio -c tests/config/nextflow.config + tags: + - spades + files: + - path: output/spades/test.assembly.gfa.gz + md5sum: e12aaf83d8dbfc313339b7636ba43447 + - path: output/spades/test.contigs.fa.gz + md5sum: 78523f66d34ac4d5a4890f353c1a6ec6 + - path: output/spades/test.scaffolds.fa.gz + md5sum: 78523f66d34ac4d5a4890f353c1a6ec6 + - path: output/spades/test.spades.log + - path: output/spades/warnings.log From c48244b677d597cf1dbf15bd4a3354b43b4d585d Mon Sep 17 00:00:00 2001 From: Ilya Pletenev <56674821+i-pletenev@users.noreply.github.com> Date: Mon, 15 Nov 2021 15:51:40 +0300 Subject: [PATCH 231/314] Add new module 'ataqv/ataqv' (#998) * Add new module 'ataqv/ataqv' * Update main.nf * Update main.nf Co-authored-by: Harshil Patel --- modules/ataqv/ataqv/functions.nf | 78 ++++++++++++++++++++++++++++++ modules/ataqv/ataqv/main.nf | 56 +++++++++++++++++++++ modules/ataqv/ataqv/meta.yml | 66 +++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ataqv/ataqv/main.nf | 69 ++++++++++++++++++++++++++ tests/modules/ataqv/ataqv/test.yml | 51 +++++++++++++++++++ 6 files changed, 324 insertions(+) create mode 100644 modules/ataqv/ataqv/functions.nf create mode 100644 modules/ataqv/ataqv/main.nf create mode 100644 modules/ataqv/ataqv/meta.yml create mode 100644 tests/modules/ataqv/ataqv/main.nf create mode 100644 tests/modules/ataqv/ataqv/test.yml diff --git a/modules/ataqv/ataqv/functions.nf b/modules/ataqv/ataqv/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ataqv/ataqv/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ataqv/ataqv/main.nf b/modules/ataqv/ataqv/main.nf new file mode 100644 index 00000000..5ddade28 --- /dev/null +++ b/modules/ataqv/ataqv/main.nf @@ -0,0 +1,56 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ATAQV_ATAQV { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::ataqv=1.2.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ataqv:1.2.1--py39ha23c084_2" + } else { + container "quay.io/biocontainers/ataqv:1.2.1--py36hfdecbe1_2" + } + + input: + tuple val(meta), path(bam), path(bai), path(peak_file) + val organism + path tss_file + path excl_regs_file + path autosom_ref_file + + output: + tuple val(meta), path("*.ataqv.json"), emit: json + tuple val(meta), path("*.problems") , emit: problems, optional: true + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def peak = peak_file ? "--peak-file $peak_file" : '' + def tss = tss_file ? "--tss-file $tss_file" : '' + def excl_regs = excl_regs_file ? "--excluded-region-file $excl_regs_file" : '' + def autosom_ref = autosom_ref_file ? "--autosomal-reference-file $autosom_ref_file" : '' + """ + ataqv \\ + $options.args \\ + $peak \\ + $tss \\ + $excl_regs \\ + $autosom_ref \\ + --metrics-file "${prefix}.ataqv.json" \\ + --threads $task.cpus \\ + --name $prefix \\ + $organism \\ + $bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( ataqv --version ) + END_VERSIONS + """ +} diff --git a/modules/ataqv/ataqv/meta.yml b/modules/ataqv/ataqv/meta.yml new file mode 100644 index 00000000..760bf95f --- /dev/null +++ b/modules/ataqv/ataqv/meta.yml @@ -0,0 +1,66 @@ +name: ataqv_ataqv +description: ataqv function of a corresponding ataqv tool +keywords: + - ataqv +tools: + - ataqv: + description: ataqv is a toolkit for measuring and comparing ATAC-seq results. It was written to help understand how well ATAC-seq assays have worked, and to make it easier to spot differences that might be caused by library prep or sequencing. + homepage: https://github.com/ParkerLab/ataqv/blob/master/README.rst + documentation: https://github.com/ParkerLab/ataqv/blob/master/README.rst + tool_dev_url: https://github.com/ParkerLab/ataqv + doi: "https://doi.org/10.1016/j.cels.2020.02.009" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file with the same prefix as bam file. Required if tss_file input is provided. + pattern: "*.bam.bai" + - peak_file: + type: file + description: A BED file of peaks called for alignments in the BAM file + pattern: "*.bed" + - organism: + type: string + description: The subject of the experiment, which determines the list of autosomes (see "Reference Genome Configuration" section at https://github.com/ParkerLab/ataqv). + - tss_file: + type: file + description: A BED file of transcription start sites for the experiment organism. If supplied, a TSS enrichment score will be calculated according to the ENCODE data standards. This calculation requires that the BAM file of alignments be indexed. + pattern: "*.bed" + - excl_regs_file: + type: file + description: A BED file containing excluded regions. Peaks or TSS overlapping these will be ignored. + pattern: "*.bed" + - autosom_ref_file: + type: file + description: A file containing autosomal reference names, one per line. The names must match the reference names in the alignment file exactly, or the metrics based on counts of autosomal alignments will be wrong. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - json: + type: file + description: The JSON file to which metrics will be written. + - problems: + type: file + description: If given, problematic reads will be logged to a file per read group, with names derived from the read group IDs, with ".problems" appended. If no read groups are found, the reads will be written to one file named after the BAM file. 
+ pattern: "*.problems" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@i-pletenev" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 22a3edf5..de72731b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -34,6 +34,10 @@ assemblyscan: - modules/assemblyscan/** - tests/modules/assemblyscan/** +ataqv/ataqv: + - modules/ataqv/ataqv/** + - tests/modules/ataqv/ataqv/** + bamaligncleaner: - modules/bamaligncleaner/** - tests/modules/bamaligncleaner/** diff --git a/tests/modules/ataqv/ataqv/main.nf b/tests/modules/ataqv/ataqv/main.nf new file mode 100644 index 00000000..2f2a62eb --- /dev/null +++ b/tests/modules/ataqv/ataqv/main.nf @@ -0,0 +1,69 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ATAQV_ATAQV } from '../../../../modules/ataqv/ataqv/main.nf' addParams( options: [:] ) +include { ATAQV_ATAQV as ATAQV_ATAQV_PROBLEM_READS} from '../../../../modules/ataqv/ataqv/main.nf' addParams( options: ['args': '--log-problematic-reads'] ) + +workflow test_ataqv_ataqv { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [], + [] + ] + + ATAQV_ATAQV ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_problem_reads { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [], + [] + ] + + ATAQV_ATAQV_PROBLEM_READS ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_peak { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + [], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] + + ATAQV_ATAQV ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_tss { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [] + ] + tss_file = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + + ATAQV_ATAQV ( input, 'human', tss_file, [], [] ) +} + +workflow test_ataqv_ataqv_excluded_regs { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [] + ] + tss_file = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + excl_regs_file = file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true) + + ATAQV_ATAQV ( input, 'human', tss_file, excl_regs_file, [] ) +} diff --git a/tests/modules/ataqv/ataqv/test.yml b/tests/modules/ataqv/ataqv/test.yml new file mode 100644 index 00000000..77452f6f --- /dev/null +++ b/tests/modules/ataqv/ataqv/test.yml @@ -0,0 +1,51 @@ +- name: ataqv ataqv test_ataqv_ataqv + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_problem_reads + command: nextflow run tests/modules/ataqv/ataqv -entry 
test_ataqv_ataqv_problem_reads -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/1.problems + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_peak + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_peak -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_tss + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_tss -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_excluded_regs + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_excluded_regs -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' From b012b349c1ef324f1e72fd474b941e2e249b8a65 Mon Sep 17 00:00:00 2001 From: Benjamin Wingfield Date: Mon, 15 Nov 2021 13:55:18 +0000 Subject: [PATCH 232/314] New module: `plink2/vcf` (#1006) * implement plink2 VCF import * fix yaml indentation Co-authored-by: Chris Cheshire --- modules/plink2/vcf/functions.nf | 78 +++++++++++++++++++++++++++++++ modules/plink2/vcf/main.nf | 43 +++++++++++++++++ modules/plink2/vcf/meta.yml | 52 +++++++++++++++++++++ tests/modules/plink2/vcf/main.nf | 13 ++++++ tests/modules/plink2/vcf/test.yml | 12 +++++ 5 files changed, 198 insertions(+) create mode 100644 modules/plink2/vcf/functions.nf create mode 100644 modules/plink2/vcf/main.nf create mode 100644 modules/plink2/vcf/meta.yml create mode 100644 tests/modules/plink2/vcf/main.nf create mode 100644 tests/modules/plink2/vcf/test.yml diff --git a/modules/plink2/vcf/functions.nf b/modules/plink2/vcf/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/plink2/vcf/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: 
args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/plink2/vcf/main.nf b/modules/plink2/vcf/main.nf new file mode 100644 index 00000000..869a5587 --- /dev/null +++ b/modules/plink2/vcf/main.nf @@ -0,0 +1,43 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PLINK2_VCF { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::plink2=2.00a2.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1" + } else { + container "quay.io/biocontainers/plink2:2.00a2.3--h712d239_1" + } + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.pgen"), emit: pgen + tuple val(meta), path("*.psam"), emit: psam + tuple val(meta), path("*.pvar"), emit: pvar + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + plink2 \\ + $options.args \\ + --vcf $vcf \\ + --out ${prefix} + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' ) + END_VERSIONS + """ +} diff --git a/modules/plink2/vcf/meta.yml b/modules/plink2/vcf/meta.yml new file mode 100644 index 00000000..1b2f3a9b --- /dev/null +++ b/modules/plink2/vcf/meta.yml @@ -0,0 +1,52 @@ +name: plink2_vcf +description: Import variant genetic data using plink2 +keywords: + - plink2 + - import +tools: + - plink2: + description: | + Whole genome association analysis toolset, designed to perform a range + of basic, large-scale analyses in a computationally efficient manner + homepage: http://www.cog-genomics.org/plink/2.0/ + documentation: http://www.cog-genomics.org/plink/2.0/general_usage + tool_dev_url: None + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: Variant calling file (vcf) + pattern: "*.{vcf}, *.{vcf.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - pgen: + type: file + description: PLINK 2 binary genotype table + pattern: "*.{pgen}" + - psam: + type: file + description: PLINK 2 sample information file + pattern: "*.{psam}" + - pvar: + type: file + description: PLINK 2 variant information file + pattern: "*.{psam}" + +authors: + - "@nebfield" diff --git a/tests/modules/plink2/vcf/main.nf b/tests/modules/plink2/vcf/main.nf new file mode 100644 index 00000000..409e7995 --- /dev/null +++ b/tests/modules/plink2/vcf/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf' addParams( options: [args:'--allow-extra-chr'] ) + +workflow test_plink2_vcf { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ] + + PLINK2_VCF ( input ) +} diff --git a/tests/modules/plink2/vcf/test.yml b/tests/modules/plink2/vcf/test.yml new file mode 100644 index 00000000..3f0cddc6 --- /dev/null +++ b/tests/modules/plink2/vcf/test.yml @@ -0,0 +1,12 @@ +- name: plink2 vcf test_plink2_vcf + command: nextflow run tests/modules/plink2/vcf -entry test_plink2_vcf -c tests/config/nextflow.config + tags: + - plink2/vcf + - plink2 + files: + - path: output/plink2/test.pgen + md5sum: d66d3cd4a6c9cca1a4073d7f4b277041 + - path: output/plink2/test.psam + md5sum: dc3b77d7753a7bed41734323e3549b10 + - path: output/plink2/test.pvar + md5sum: d61e53f847a6335138b584216b4e45d0 From 2c3c87a10fae962da73f2007a3041c7d581f66c6 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 15 Nov 2021 15:26:06 +0100 Subject: [PATCH 233/314] Add `leehom` module (#1052) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Add leeHom module * Update modules/leehom/main.nf Co-authored-by: FriederikeHanssen * Update modules/leehom/main.nf Co-authored-by: FriederikeHanssen * Update modules/leehom/main.nf Co-authored-by: FriederikeHanssen * Update modules/leehom/main.nf Co-authored-by: FriederikeHanssen Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry Co-authored-by: FriederikeHanssen --- modules/leehom/functions.nf | 78 ++++++++++++++++++++++++++++++ modules/leehom/main.nf | 85 +++++++++++++++++++++++++++++++++ modules/leehom/meta.yml | 77 +++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 21 ++++---- tests/modules/leehom/main.nf | 36 ++++++++++++++ tests/modules/leehom/test.yml | 43 +++++++++++++++++ 6 files changed, 331 insertions(+), 9 deletions(-) create mode 100644 modules/leehom/functions.nf create mode 100644 modules/leehom/main.nf create mode 100644 modules/leehom/meta.yml create mode 100644 tests/modules/leehom/main.nf create mode 100644 tests/modules/leehom/test.yml diff --git a/modules/leehom/functions.nf 
b/modules/leehom/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/leehom/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/leehom/main.nf b/modules/leehom/main.nf new file mode 100644 index 00000000..e0d9ee39 --- /dev/null +++ b/modules/leehom/main.nf @@ -0,0 +1,85 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION="1.2.15" + +process LEEHOM { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::leehom=1.2.15" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/leehom:1.2.15--h29e30f7_1" + } else { + container "quay.io/biocontainers/leehom:1.2.15--h29e30f7_1" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("${prefix}.bam") , optional: true, emit: bam + tuple val(meta), path("${prefix}.fq.gz") , optional: true, emit: fq_pass + tuple val(meta), path("${prefix}.fail.fq.gz") , optional: true, emit: fq_fail + tuple val(meta), path("${prefix}_r1.fq.gz") , optional: true, emit: unmerged_r1_fq_pass + tuple val(meta), path("${prefix}_r1.fail.fq.gz"), optional: true, emit: unmerged_r1_fq_fail + tuple val(meta), path("${prefix}_r2.fq.gz") , optional: true, emit: unmerged_r2_fq_pass + tuple val(meta), path("${prefix}_r2.fail.fq.gz"), optional: true, emit: unmerged_r2_fq_fail + tuple val(meta), path("*.log") , emit: log + + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + if ( reads.toString().endsWith('.bam') ) { + """ + leeHom \\ + $options.args \\ + -t $task.cpus \\ + -o ${prefix}.bam \\ + --log ${prefix}.log \\ + $reads + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ + } else if ( meta.single_end ) { + """ + leeHom \\ + $options.args \\ + -t $task.cpus \\ + -fq1 $reads \\ + -fqo ${prefix} \\ + --log ${prefix}.log + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ + } else { + """ + leeHom \\ + $options.args \\ + -t $task.cpus \\ + -fq1 ${reads[0]} \\ + -fq2 ${reads[1]} \\ + -fqo ${prefix} \\ + --log ${prefix}.log + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ + } +} diff --git a/modules/leehom/meta.yml b/modules/leehom/meta.yml new file mode 100644 index 00000000..b0d6092a --- /dev/null +++ b/modules/leehom/meta.yml @@ -0,0 +1,77 @@ +name: leehom +description: Bayesian reconstruction of ancient DNA fragments +keywords: + - ancient DNA + - adapter removal + - clipping + - trimming + - merging + - collapsing + - preprocessing + - bayesian +tools: + - leehom: + description: Bayesian reconstruction of ancient DNA fragments + homepage: "https://grenaud.github.io/leeHom/" + documentation: "https://github.com/grenaud/leeHom" + tool_dev_url: "https://github.com/grenaud/leeHom" + doi: "10.1093/nar/gku699" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: Unaligned BAM or one or two gzipped FASTQ file(s) + pattern: "*.{bam,fq.gz,fastq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: BAM file + pattern: "*.bam" + - fq_pass: + type: file + description: Trimmed and merged FASTQ + pattern: "*.fq.gz" + - fq_fail: + type: file + description: Failed trimmed and merged FASTQs + pattern: "*.fail.fq.gz" + - unmerged_r1_fq_pass: + type: file + description: Passed unmerged R1 FASTQs + pattern: "*.r1.fq.gz" + - unmerged_r1_fq_fail: + type: file + description: Failed unmerged R1 FASTQs + pattern: "*.r1.fail.fq.gz" + - unmerged_r2_fq_pass: + type: file + description: Passed unmerged R1 FASTQs + pattern: "*.r2.fq.gz" + - unmerged_r2_fq_pass: + type: file + description: Failed unmerged R1 FASTQs + pattern: "*.r2.fail.fq.gz" + - log: + type: file + description: Log file of command + pattern: "*.log" + + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index de72731b..e64e67f9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -621,14 +621,14 @@ homer/makeucscfile: - modules/homer/makeucscfile/** - tests/modules/homer/makeucscfile/** -imputeme/vcftoprs: - - modules/imputeme/vcftoprs/** - - tests/modules/imputeme/vcftoprs/** - idr: - modules/idr/** - tests/modules/idr/** +imputeme/vcftoprs: + - modules/imputeme/vcftoprs/** + - tests/modules/imputeme/vcftoprs/** + iqtree: - modules/iqtree/** - tests/modules/iqtree/** @@ -718,6 +718,10 @@ last/train: - modules/last/train/** - tests/modules/last/train/** +leehom: + - modules/leehom/** + - tests/modules/leehom/** + lima: - modules/lima/** - tests/modules/lima/** @@ -786,6 +790,10 @@ megahit: - modules/megahit/** - tests/modules/megahit/** +meningotype: + - modules/meningotype/** + - tests/modules/meningotype/** + metabat2/jgisummarizebamcontigdepths: - modules/metabat2/jgisummarizebamcontigdepths/** - tests/modules/metabat2/jgisummarizebamcontigdepths/** @@ -794,11 +802,6 @@ metabat2/metabat2: - modules/metabat2/metabat2/** - tests/modules/metabat2/metabat2/** -meningotype: - - modules/meningotype/** - - tests/modules/meningotype/** - - metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** diff --git a/tests/modules/leehom/main.nf b/tests/modules/leehom/main.nf new file mode 100644 index 00000000..2fe6f12f --- /dev/null +++ b/tests/modules/leehom/main.nf @@ -0,0 +1,36 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { LEEHOM } from '../../../modules/leehom/main.nf' addParams( options: [:] ) +include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' addParams( options: [args: "-f4 -b"] ) + +workflow test_leehom_bam { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + + fasta = [] + + SAMTOOLS_VIEW ( input, fasta ) + LEEHOM ( SAMTOOLS_VIEW.out.bam ) +} + +workflow test_leehom_se_fq { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + + LEEHOM ( input ) +} + +workflow test_leehom_pe_fq { + + input = [ [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] ] + + LEEHOM ( input ) +} diff --git a/tests/modules/leehom/test.yml b/tests/modules/leehom/test.yml new 
file mode 100644 index 00000000..8a9f083e --- /dev/null +++ b/tests/modules/leehom/test.yml @@ -0,0 +1,43 @@ +- name: leehom test_leehom_bam + command: nextflow run tests/modules/leehom -entry test_leehom_bam -c tests/config/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.bam + md5sum: 19a1bf95714523868791f1d4d3aaee73 + - path: output/leehom/test.log + md5sum: d1f5da273eb69f41babda510797c7671 + - path: output/samtools/test.bam + md5sum: 25d13b3b31b147bb3836dea9932c38dd + +- name: leehom test_leehom_se_fq + command: nextflow run tests/modules/leehom -entry test_leehom_se_fq -c tests/config/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test.fq.gz + md5sum: ed10c4bbf5c3082ca68823535b91e1e2 + - path: output/leehom/test.log + md5sum: 59aa280cb72dfbea05ba913cb89db143 + +- name: leehom test_leehom_pe_fq + command: nextflow run tests/modules/leehom -entry test_leehom_pe_fq -c tests/config/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test.fq.gz + md5sum: 84929b78e3f89371ecd3b4c915b9ec33 + - path: output/leehom/test.log + md5sum: 800b5a88dc0822886bfbb271029e2a4a + - path: output/leehom/test_r1.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test_r1.fq.gz + md5sum: e9258420fa712e8536106995a7d1d97a + - path: output/leehom/test_r2.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test_r2.fq.gz + md5sum: 27230bcc5eae81ec5c1701798d39c1af From 7be60774b6562b27ee07f246ba293fc4bff074c4 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Mon, 15 Nov 2021 07:35:55 -0700 Subject: [PATCH 234/314] add module for tbprofiler (#947) * add module for tbprofiler * Update test.yml * Update meta.yml Co-authored-by: Abhinav Sharma Co-authored-by: FriederikeHanssen --- modules/tbprofiler/profile/functions.nf | 78 +++++++++++++++++++++++ modules/tbprofiler/profile/main.nf | 48 ++++++++++++++ modules/tbprofiler/profile/meta.yml | 59 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/tbprofiler/profile/main.nf | 24 +++++++ tests/modules/tbprofiler/profile/test.yml | 21 ++++++ 6 files changed, 234 insertions(+) create mode 100644 modules/tbprofiler/profile/functions.nf create mode 100644 modules/tbprofiler/profile/main.nf create mode 100644 modules/tbprofiler/profile/meta.yml create mode 100644 tests/modules/tbprofiler/profile/main.nf create mode 100644 tests/modules/tbprofiler/profile/test.yml diff --git a/modules/tbprofiler/profile/functions.nf b/modules/tbprofiler/profile/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/tbprofiler/profile/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta 
= args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/tbprofiler/profile/main.nf b/modules/tbprofiler/profile/main.nf new file mode 100644 index 00000000..afd78b05 --- /dev/null +++ b/modules/tbprofiler/profile/main.nf @@ -0,0 +1,48 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process TBPROFILER_PROFILE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::tb-profiler=3.0.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/tb-profiler:3.0.8--pypyh5e36f6f_0" + } else { + container "quay.io/biocontainers/tb-profiler:3.0.8--pypyh5e36f6f_0" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("bam/*.bam") , emit: bam + tuple val(meta), path("results/*.csv") , emit: csv, optional: true + tuple val(meta), path("results/*.json"), emit: json + tuple val(meta), path("results/*.txt") , emit: txt, optional: true + tuple val(meta), path("vcf/*.vcf.gz") , emit: vcf + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def input_reads = meta.single_end ? 
"--read1 $reads" : "--read1 ${reads[0]} --read2 ${reads[1]}" + """ + tb-profiler \\ + profile \\ + $options.args \\ + --prefix ${prefix} \\ + --threads $task.cpus \\ + $input_reads + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(tb-profiler --version 2>&1) | sed 's/TBProfiler version //') + END_VERSIONS + """ +} diff --git a/modules/tbprofiler/profile/meta.yml b/modules/tbprofiler/profile/meta.yml new file mode 100644 index 00000000..0cac6d6b --- /dev/null +++ b/modules/tbprofiler/profile/meta.yml @@ -0,0 +1,59 @@ +name: tbprofiler_profile +description: A tool to detect resistance and lineages of M. tuberculosis genomes +keywords: + - Mycobacterium tuberculosis + - resistance + - serotype +tools: + - tbprofiler: + description: Profiling tool for Mycobacterium tuberculosis to detect drug resistance and lineage from WGS data + homepage: https://github.com/jodyphelan/TBProfiler + documentation: https://jodyphelan.gitbook.io/tb-profiler/ + tool_dev_url: https://github.com/jodyphelan/TBProfiler + doi: "10.1186/s13073-019-0650-x" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ file + pattern: "*.{fastq.gz,fq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: BAM file with alignment details + pattern: "*.bam" + - csv: + type: file + description: Optional CSV formated result file of resistance and strain type + pattern: "*.csv" + - json: + type: file + description: JSON formated result file of resistance and strain type + pattern: "*.json" + - txt: + type: file + description: Optional text file of resistance and strain type + pattern: "*.txt" + - vcf: + type: file + description: VCF with variant info again refernce genomes + pattern: "*.vcf" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index e64e67f9..73d3c19b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1251,6 +1251,10 @@ tabix/tabix: - modules/tabix/tabix/** - tests/modules/tabix/tabix/** +tbprofiler/profile: + - modules/tbprofiler/profile/** + - tests/modules/tbprofiler/profile/** + tiddit/cov: - modules/tiddit/cov/** - tests/modules/tiddit/cov/** diff --git a/tests/modules/tbprofiler/profile/main.nf b/tests/modules/tbprofiler/profile/main.nf new file mode 100644 index 00000000..e0c6ef56 --- /dev/null +++ b/tests/modules/tbprofiler/profile/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { TBPROFILER_PROFILE as TBPROFILER_PROFILE_ILLUMINA } from '../../../../modules/tbprofiler/profile/main.nf' addParams( options: [args: '--platform illumina'] ) +include { TBPROFILER_PROFILE as TBPROFILER_PROFILE_NANOPORE} from '../../../../modules/tbprofiler/profile/main.nf' addParams( options: [args: '--platform nanopore'] ) + +workflow test_tbprofiler_profile_illumina { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] + + TBPROFILER_PROFILE_ILLUMINA ( input ) +} + + +workflow 
test_tbprofiler_profile_nanopore { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + + TBPROFILER_PROFILE_NANOPORE ( input ) +} diff --git a/tests/modules/tbprofiler/profile/test.yml b/tests/modules/tbprofiler/profile/test.yml new file mode 100644 index 00000000..abfb552d --- /dev/null +++ b/tests/modules/tbprofiler/profile/test.yml @@ -0,0 +1,21 @@ +- name: tbprofiler profile illumina + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_illumina -c tests/config/nextflow.config + tags: + - tbprofiler + - tbprofiler/profile + files: + - path: output/tbprofiler/bam/test.bam + - path: output/tbprofiler/results/test.results.json + contains: ['genome_positions', 'locus_tag', 'tbprofiler_version'] + - path: output/tbprofiler/vcf/test.targets.csq.vcf.gz + +- name: tbprofiler profile nanopore + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_nanopore -c tests/config/nextflow.config + tags: + - tbprofiler + - tbprofiler/profile + files: + - path: output/tbprofiler/bam/test.bam + - path: output/tbprofiler/results/test.results.json + contains: ['genome_positions', 'locus_tag', 'tbprofiler_version'] + - path: output/tbprofiler/vcf/test.targets.csq.vcf.gz From b5b3ff16ce6ab062ed3a1191a4b0b68ec0eeaa12 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Mon, 15 Nov 2021 07:41:36 -0700 Subject: [PATCH 235/314] add module for fastq-scan (#935) * add module for fastq-scan * change fastq to reads * remove uncompressed support Co-authored-by: Gregor Sturm Co-authored-by: FriederikeHanssen --- modules/fastqscan/functions.nf | 78 ++++++++++++++++++++++++++++++++ modules/fastqscan/main.nf | 40 ++++++++++++++++ modules/fastqscan/meta.yml | 43 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/fastqscan/main.nf | 13 ++++++ tests/modules/fastqscan/test.yml | 7 +++ 6 files changed, 185 insertions(+) create mode 100644 modules/fastqscan/functions.nf create mode 100644 modules/fastqscan/main.nf create mode 100644 modules/fastqscan/meta.yml create mode 100644 tests/modules/fastqscan/main.nf create mode 100644 tests/modules/fastqscan/test.yml diff --git a/modules/fastqscan/functions.nf b/modules/fastqscan/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/fastqscan/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = 
paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/fastqscan/main.nf b/modules/fastqscan/main.nf new file mode 100644 index 00000000..0106892f --- /dev/null +++ b/modules/fastqscan/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FASTQSCAN { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::fastq-scan=0.4.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/fastq-scan:0.4.4--h7d875b9_0" + } else { + container "quay.io/biocontainers/fastq-scan:0.4.4--h7d875b9_0" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("*.json"), emit: json + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + zcat $reads | \\ + fastq-scan \\ + $options.args > ${prefix}.json + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(fastq-scan -v 2>&1) | sed 's/^.*fastq-scan //' ) + END_VERSIONS + """ +} diff --git a/modules/fastqscan/meta.yml b/modules/fastqscan/meta.yml new file mode 100644 index 00000000..99538b5a --- /dev/null +++ b/modules/fastqscan/meta.yml @@ -0,0 +1,43 @@ +name: fastqscan +description: FASTQ summary statistics in JSON format +keywords: + - fastq + - summary + - statistics +tools: + - fastqscan: + description: FASTQ summary statistics in JSON format + homepage: https://github.com/rpetit3/fastq-scan + documentation: https://github.com/rpetit3/fastq-scan + tool_dev_url: https://github.com/rpetit3/fastq-scan + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ file + pattern: "*.{fastq.gz,fq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: JSON formatted file of summary statistics + pattern: "*.json" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 73d3c19b..9362b10d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -394,6 +394,10 @@ fastqc: - modules/fastqc/** - tests/modules/fastqc/** +fastqscan: + - modules/fastqscan/** + - tests/modules/fastqscan/** + fasttree: - modules/fasttree/** - tests/modules/fasttree/** diff --git a/tests/modules/fastqscan/main.nf b/tests/modules/fastqscan/main.nf new file mode 100644 index 00000000..5fd824f6 --- /dev/null +++ b/tests/modules/fastqscan/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FASTQSCAN } from '../../../modules/fastqscan/main.nf' addParams( options: [ args: "-g 30000"] ) + +workflow test_fastqscan { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + + FASTQSCAN ( input ) +} diff --git a/tests/modules/fastqscan/test.yml b/tests/modules/fastqscan/test.yml new file mode 100644 index 00000000..80bcbc47 --- /dev/null +++ b/tests/modules/fastqscan/test.yml @@ -0,0 +1,7 @@ +- name: fastqscan test_fastqscan + command: nextflow run tests/modules/fastqscan -entry test_fastqscan -c tests/config/nextflow.config + tags: + - fastqscan + files: + - path: output/fastqscan/test.json + md5sum: b9d59a36fe85e556b5a80573ea0b0266 From 51f2d9a806f5b7c64dfea8c3d5a5abfa7513fcf8 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Mon, 15 Nov 2021 07:47:41 -0700 Subject: [PATCH 236/314] Update main.nf (#938) Co-authored-by: FriederikeHanssen --- modules/spatyper/main.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/spatyper/main.nf b/modules/spatyper/main.nf index ce320bfc..34207dbf 100644 --- a/modules/spatyper/main.nf +++ b/modules/spatyper/main.nf @@ -31,7 +31,6 @@ process SPATYPER { def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def input_args = repeats && repeat_order ? "-r ${repeats} -o ${repeat_order}" : "" """ - env spaTyper \\ $options.args \\ $input_args \\ From 73a09850fb159af232a80ec10539642ba44956f6 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Mon, 15 Nov 2021 08:06:02 -0700 Subject: [PATCH 237/314] Update agrvate version (#970) Co-authored-by: FriederikeHanssen --- modules/agrvate/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index c1a6748e..c45bbe06 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -11,11 +11,11 @@ process AGRVATE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::agrvate=1.0.1" : null) + conda (params.enable_conda ? 
"bioconda::agrvate=1.0.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/agrvate:1.0.1--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/agrvate:1.0.2--hdfd78af_0" } else { - container "quay.io/biocontainers/agrvate:1.0.1--hdfd78af_0" + container "quay.io/biocontainers/agrvate:1.0.2--hdfd78af_0" } input: From a6ca2b006b9eb4f1a07098966867c3c5fea42c51 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 10:18:43 -0500 Subject: [PATCH 238/314] Cooler merge (#515) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add software/cooler * fix the wrong files uploaded. * create a branch for cooler/merge * remove the bin_size from metadata. * update the test_data to test-datasets * update pytest_modules.yml * update the test file from single input file to two input file. update the output file from hdf5 to bedpe. * update the version.txt to version.yml and functions.nf * change version.yml to versions * update the test file path and fix the output versions. * Update meta.yml Correct "version" to "versions" * Update main.nf Fix typo * Update main.nf Remove some spaces Co-authored-by: Gregor Sturm Co-authored-by: Sébastien Guizard --- modules/cooler/merge/functions.nf | 78 +++++++++++++++++++++++++++++ modules/cooler/merge/main.nf | 41 +++++++++++++++ modules/cooler/merge/meta.yml | 41 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 4 ++ tests/modules/cooler/merge/main.nf | 16 ++++++ tests/modules/cooler/merge/test.yml | 8 +++ 7 files changed, 192 insertions(+) create mode 100644 modules/cooler/merge/functions.nf create mode 100644 modules/cooler/merge/main.nf create mode 100644 modules/cooler/merge/meta.yml create mode 100644 tests/modules/cooler/merge/main.nf create mode 100644 tests/modules/cooler/merge/test.yml diff --git a/modules/cooler/merge/functions.nf b/modules/cooler/merge/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cooler/merge/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir 
?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cooler/merge/main.nf b/modules/cooler/merge/main.nf new file mode 100644 index 00000000..b15439a4 --- /dev/null +++ b/modules/cooler/merge/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process COOLER_MERGE { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" + } else { + container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" + } + + input: + tuple val(meta), path(cool) + + output: + tuple val(meta), path("*.cool"), emit: cool + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + cooler merge \\ + $options.args \\ + ${prefix}.cool \\ + ${cool} + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/merge/meta.yml b/modules/cooler/merge/meta.yml new file mode 100644 index 00000000..f5c0a733 --- /dev/null +++ b/modules/cooler/merge/meta.yml @@ -0,0 +1,41 @@ +name: cooler_merge +description: Merge multiple coolers with identical axes +keywords: + - merge +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cool: + type: file + description: Path to COOL file + pattern: "*.{cool,mcool}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + - cool: + type: file + description: Path to COOL file + pattern: "*.cool" + +authors: + - "@jianhong" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 9362b10d..6fcf33dd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -294,6 +294,10 @@ cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** +cooler/merge: + - modules/cooler/merge/** + - tests/modules/cooler/merge/** + csvtk/concat: - modules/csvtk/concat/** - tests/modules/csvtk/concat/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index c34696f2..6504783c 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -255,6 +255,10 @@ params { 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } + 'cooler' { + test_merge_cool = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cool" + test_merge_cool_cp2 = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cp2.cool" + } } 'bacteroides_fragilis'{ 'genome' { diff --git a/tests/modules/cooler/merge/main.nf b/tests/modules/cooler/merge/main.nf new file mode 100644 index 00000000..564660c5 --- /dev/null +++ b/tests/modules/cooler/merge/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_MERGE } from '../../../../modules/cooler/merge/main.nf' addParams( options: [publish_files:[:]] ) +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) + +workflow test_cooler_merge { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_merge_cool_cp2'], checkIfExists: true)] + ] + + COOLER_MERGE ( input ).cool | COOLER_DUMP +} diff --git a/tests/modules/cooler/merge/test.yml b/tests/modules/cooler/merge/test.yml new file mode 100644 index 00000000..3ac388e7 --- /dev/null +++ b/tests/modules/cooler/merge/test.yml @@ -0,0 +1,8 @@ +- name: cooler merge test_cooler_merge + command: nextflow run tests/modules/cooler/merge -entry test_cooler_merge -c tests/config/nextflow.config + tags: + - cooler/merge + - cooler + files: + - path: output/cooler/test.bedpe + md5sum: 0ce5e715bfc4674cdda02f2d7e7e3170 From 1a4c7cec1b9d82fdaa15897d8e9a9e9a4767444d Mon Sep 17 00:00:00 2001 From: santiagorevale Date: Mon, 15 Nov 2021 15:43:06 +0000 Subject: [PATCH 239/314] New modules added: issues #200 and #310 (#884) * New modules added: issues #200 and #310 * Update main.nf * Update meta.yml * Update tests/modules/gatk4/genotypegvcfs/main.nf * Apply suggestions from code review * Update main.nf * Updating tests for GenomicsDB input and adding the path for this test resource to test_data.config * Some minor changes on one of the test files I forgot to include Co-authored-by: Harshil Patel Co-authored-by: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> --- modules/gatk4/genotypegvcfs/functions.nf | 78 ++++++++ modules/gatk4/genotypegvcfs/main.nf | 54 ++++++ modules/gatk4/genotypegvcfs/meta.yml | 69 +++++++ modules/gatk4/indexfeaturefile/functions.nf | 78 ++++++++ modules/gatk4/indexfeaturefile/main.nf | 40 ++++ modules/gatk4/indexfeaturefile/meta.yml | 42 ++++ tests/config/pytest_modules.yml | 8 + tests/config/test_data.config | 2 + tests/modules/gatk4/genotypegvcfs/main.nf | 180 
++++++++++++++++++ tests/modules/gatk4/genotypegvcfs/test.yml | 80 ++++++++ tests/modules/gatk4/indexfeaturefile/main.nf | 45 +++++ tests/modules/gatk4/indexfeaturefile/test.yml | 39 ++++ 12 files changed, 715 insertions(+) create mode 100644 modules/gatk4/genotypegvcfs/functions.nf create mode 100644 modules/gatk4/genotypegvcfs/main.nf create mode 100644 modules/gatk4/genotypegvcfs/meta.yml create mode 100644 modules/gatk4/indexfeaturefile/functions.nf create mode 100644 modules/gatk4/indexfeaturefile/main.nf create mode 100644 modules/gatk4/indexfeaturefile/meta.yml create mode 100644 tests/modules/gatk4/genotypegvcfs/main.nf create mode 100644 tests/modules/gatk4/genotypegvcfs/test.yml create mode 100644 tests/modules/gatk4/indexfeaturefile/main.nf create mode 100644 tests/modules/gatk4/indexfeaturefile/test.yml diff --git a/modules/gatk4/genotypegvcfs/functions.nf b/modules/gatk4/genotypegvcfs/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/genotypegvcfs/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf new file mode 100644 index 00000000..6fbbe663 --- /dev/null +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -0,0 +1,54 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_GENOTYPEGVCFS { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(gvcf), path(gvcf_index) + path fasta + path fasta_index + path fasta_dict + path dbsnp + path dbsnp_index + path intervals_bed + + output: + tuple val(meta), path("*.vcf.gz"), emit: vcf + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" + def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" + def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" + """ + gatk \\ + GenotypeGVCFs \\ + $options.args \\ + $interval_options \\ + $dbsnp_options \\ + -R $fasta \\ + -V $gvcf_options \\ + -O ${prefix}.vcf.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/genotypegvcfs/meta.yml b/modules/gatk4/genotypegvcfs/meta.yml new file mode 100644 index 00000000..cd7457a7 --- /dev/null +++ b/modules/gatk4/genotypegvcfs/meta.yml @@ -0,0 +1,69 @@ +name: gatk4_genotypegvcfs +description: | + Perform joint genotyping on one or more samples pre-called with HaplotypeCaller. +keywords: + - joint genotyping + - genotype + - gvcf +tools: + - gatk4: + description: Genome Analysis Toolkit (GATK4) + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + tool_dev_url: https://github.com/broadinstitute/gatk + doi: "10.1158/1538-7445.AM2017-3590" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - gvcf: + type: tuple of files + description: | + Tuple of gVCF(.gz) file (first) and its index (second) or the path to a GenomicsDB (and empty) + pattern: ["*.{vcf,vcf.gz}", "*.{idx,tbi}"] + - fasta: + type: file + description: Reference fasta file + pattern: "*.fasta" + - fasta_index: + type: file + description: Reference fasta index file + pattern: "*.fai" + - fasta_dict: + type: file + description: Reference fasta sequence dict file + pattern: "*.dict" + - dbsnp: + type: file + description: dbSNP VCF file + pattern: "*.vcf.gz" + - dbsnp_index: + type: tuple of files + description: dbSNP VCF index file + pattern: "*.tbi" + - intervals_bed: + type: file + description: An intervals BED file + pattern: "*.bed" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: Genotyped VCF file + pattern: "*.vcf.gz" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@santiagorevale" diff --git a/modules/gatk4/indexfeaturefile/functions.nf b/modules/gatk4/indexfeaturefile/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/indexfeaturefile/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ?
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/indexfeaturefile/main.nf b/modules/gatk4/indexfeaturefile/main.nf new file mode 100644 index 00000000..8f40a3e3 --- /dev/null +++ b/modules/gatk4/indexfeaturefile/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_INDEXFEATUREFILE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(feature_file) + + output: + tuple val(meta), path("*.{tbi,idx}"), emit: index + path "versions.yml" , emit: versions + + script: + """ + gatk \\ + IndexFeatureFile \\ + $options.args \\ + -I $feature_file + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/indexfeaturefile/meta.yml b/modules/gatk4/indexfeaturefile/meta.yml new file mode 100644 index 00000000..eebe6b85 --- /dev/null +++ b/modules/gatk4/indexfeaturefile/meta.yml @@ -0,0 +1,42 @@ +name: gatk4_indexfeaturefile +description: Creates an index for a feature file, e.g. VCF or BED file. +keywords: + - index + - feature +tools: + - gatk4: + description: Genome Analysis Toolkit (GATK4) + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + tool_dev_url: https://github.com/broadinstitute/gatk + doi: "10.1158/1538-7445.AM2017-3590" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - feature_file: + type: file + description: VCF/BED file + pattern: "*.{vcf,vcf.gz,bed,bed.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - index: + type: file + description: Index for VCF/BED file + pattern: "*.{tbi,idx}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@santiagorevale" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6fcf33dd..34085bcd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -474,6 +474,10 @@ gatk4/genomicsdbimport: - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** +gatk4/genotypegvcfs: + - modules/gatk4/genotypegvcfs/** + - tests/modules/gatk4/genotypegvcfs/** + gatk4/getpileupsummaries: - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -482,6 +486,10 @@ gatk4/haplotypecaller: - modules/gatk4/haplotypecaller/** - tests/modules/gatk4/haplotypecaller/** +gatk4/indexfeaturefile: + - modules/gatk4/indexfeaturefile/** + - tests/modules/gatk4/indexfeaturefile/** + gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 6504783c..7538046d 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -191,6 +191,8 @@ params { test2_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.pileups.table" test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" + test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" + test_test2_paired_mutect2_calls_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz" test_test2_paired_mutect2_calls_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.tbi" test_test2_paired_mutect2_calls_vcf_gz_stats = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.stats" diff --git a/tests/modules/gatk4/genotypegvcfs/main.nf b/tests/modules/gatk4/genotypegvcfs/main.nf new file mode 100644 index 00000000..0b555180 --- /dev/null +++ b/tests/modules/gatk4/genotypegvcfs/main.nf @@ -0,0 +1,180 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_GENOTYPEGVCFS } from '../../../../modules/gatk4/genotypegvcfs/main.nf' addParams( options: [suffix:'.genotyped'] ) +include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) + +// Basic parameters with uncompressed VCF input +workflow test_gatk4_genotypegvcfs_vcf_input { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], [] ) +} + +// Basic parameters with compressed VCF input +workflow test_gatk4_genotypegvcfs_gz_input { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + 
file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], [] ) +} + +// Basic parameters + optional dbSNP +workflow test_gatk4_genotypegvcfs_gz_input_dbsnp { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, [] ) +} + +// Basic parameters + optional intervals +workflow test_gatk4_genotypegvcfs_gz_input_intervals { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + intervalsBed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], intervalsBed ) +} + +// Basic parameters + optional dbSNP + optional intervals +workflow test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + intervalsBed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, intervalsBed ) +} + +// Basic parameters with GenomicsDB input +workflow test_gatk4_genotypegvcfs_gendb_input { + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex 
= file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( test_genomicsdb ) + + Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} + Channel + .of([ id:'test' ]) + .combine(UNTAR.out.untar) + .combine(mock_gvcf_index) + .set{ input } + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], [] ) +} + +// Basic parameters with GenomicsDB + optional dbSNP +workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp { + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( test_genomicsdb ) + + Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} + Channel + .of([ id:'test' ]) + .combine(UNTAR.out.untar) + .combine(mock_gvcf_index) + .set{ input } + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, [] ) +} + +// Basic parameters with GenomicsDB + optional intervals +workflow test_gatk4_genotypegvcfs_gendb_input_intervals { + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + intervalsBed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( test_genomicsdb ) + + Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} + Channel + .of([ id:'test' ]) + .combine(UNTAR.out.untar) + .combine(mock_gvcf_index) + .set{ input } + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], intervalsBed ) +} + +// Basic parameters with GenomicsDB + optional dbSNP + optional intervals +workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals { + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + intervalsBed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( test_genomicsdb ) + + Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} + Channel 
+ .of([ id:'test' ]) + .combine(UNTAR.out.untar) + .combine(mock_gvcf_index) + .set{ input } + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, intervalsBed ) +} diff --git a/tests/modules/gatk4/genotypegvcfs/test.yml b/tests/modules/gatk4/genotypegvcfs/test.yml new file mode 100644 index 00000000..ad39a48d --- /dev/null +++ b/tests/modules/gatk4/genotypegvcfs/test.yml @@ -0,0 +1,80 @@ +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_vcf_input + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_vcf_input -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_intervals + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: 
output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_intervals + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] diff --git a/tests/modules/gatk4/indexfeaturefile/main.nf b/tests/modules/gatk4/indexfeaturefile/main.nf new file mode 100644 index 00000000..e523606a --- /dev/null +++ b/tests/modules/gatk4/indexfeaturefile/main.nf @@ -0,0 +1,45 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_INDEXFEATUREFILE } from '../../../../modules/gatk4/indexfeaturefile/main.nf' addParams( options: [:] ) + +workflow test_gatk4_indexfeaturefile_bed { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + ] + + GATK4_INDEXFEATUREFILE ( input ) +} + +workflow test_gatk4_indexfeaturefile_bed_gz { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + ] + + GATK4_INDEXFEATUREFILE ( input ) +} + +workflow test_gatk4_indexfeaturefile_vcf { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true) + ] + + GATK4_INDEXFEATUREFILE ( input ) +} + +workflow test_gatk4_indexfeaturefile_vcf_gz { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true) + ] + + GATK4_INDEXFEATUREFILE ( input ) +} diff --git a/tests/modules/gatk4/indexfeaturefile/test.yml b/tests/modules/gatk4/indexfeaturefile/test.yml new file mode 100644 index 00000000..5883695a --- /dev/null +++ b/tests/modules/gatk4/indexfeaturefile/test.yml @@ -0,0 +1,39 @@ +# We can't use an md5sum or check file contents because: +# a) the path to the file is embedded inside it, +# b) the file is binary so we can't check for text inside it. 
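Because these index files can only be checked for existence, a brief usage sketch may help show what they are for (illustrative only, not part of the patch: the `index` emit name is assumed, since the module's main.nf is not in this hunk, and the join simply re-pairs each VCF with the index it produced):

    include { GATK4_INDEXFEATUREFILE } from '../../../../modules/gatk4/indexfeaturefile/main.nf' addParams( options: [:] )

    workflow sketch_gatk4_indexfeaturefile_usage {

        vcf = [ [ id:'test' ], // meta map
                file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true) ]

        GATK4_INDEXFEATUREFILE ( vcf )

        // Re-attach the new index to its VCF, keyed on the meta map, giving the
        // [ meta, vcf, tbi ] shape that downstream GATK modules usually expect.
        Channel.of( vcf )
            .join( GATK4_INDEXFEATUREFILE.out.index ) // emit name assumed
            .set { ch_vcf_tbi }
    }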
+- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed + command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/indexfeaturefile + files: + - path: output/gatk4/genome.bed.idx + +- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed_gz + command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed_gz -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/indexfeaturefile + files: + - path: output/gatk4/genome.bed.gz.tbi + md5sum: 2eb6ed0a0b049efe4caa1413089dcd74 + +# We can't use an md5sum or check file contents because: +# a) the path to the file is embedded inside it, +# b) the file is binary so we can't check for text inside it. +- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf + command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/indexfeaturefile + files: + - path: output/gatk4/test.genome.vcf.idx + +- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf_gz + command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf_gz -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/indexfeaturefile + files: + - path: output/gatk4/test.genome.vcf.gz.tbi + md5sum: ea03cd1d1f178eefa656787537053c37 From 0754d49db847f6e7bcba25c11fdcfe9f071055af Mon Sep 17 00:00:00 2001 From: Michael J Cipriano <42848032+mjcipriano@users.noreply.github.com> Date: Mon, 15 Nov 2021 10:53:41 -0500 Subject: [PATCH 240/314] Initial commit mummer module (#940) --- modules/mummer/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/mummer/main.nf | 55 +++++++++++++++++++++++ modules/mummer/meta.yml | 48 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/mummer/main.nf | 14 ++++++ tests/modules/mummer/test.yml | 7 +++ 6 files changed, 206 insertions(+) create mode 100644 modules/mummer/functions.nf create mode 100644 modules/mummer/main.nf create mode 100644 modules/mummer/meta.yml create mode 100644 tests/modules/mummer/main.nf create mode 100644 tests/modules/mummer/test.yml diff --git a/modules/mummer/functions.nf b/modules/mummer/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/mummer/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/mummer/main.nf b/modules/mummer/main.nf new file mode 100644 index 00000000..e46fd799 --- /dev/null +++ b/modules/mummer/main.nf @@ -0,0 +1,55 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '3.23' + +process MUMMER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::mummer=3.23" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12" + } else { + container "quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12" + } + + input: + tuple val(meta), path(ref), path(query) + + output: + tuple val(meta), path("*.coords"), emit: coords + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false + def fasta_name_ref = ref.getName().replace(".gz", "") + + def is_compressed_query = query.getName().endsWith(".gz") ? 
true : false + def fasta_name_query = query.getName().replace(".gz", "") + """ + if [ "$is_compressed_ref" == "true" ]; then + gzip -c -d $ref > $fasta_name_ref + fi + if [ "$is_compressed_query" == "true" ]; then + gzip -c -d $query > $fasta_name_query + fi + mummer \\ + $options.args \\ + $fasta_name_ref \\ + $fasta_name_query \\ + > ${prefix}.coords + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ +} diff --git a/modules/mummer/meta.yml b/modules/mummer/meta.yml new file mode 100644 index 00000000..5f7a983c --- /dev/null +++ b/modules/mummer/meta.yml @@ -0,0 +1,48 @@ +name: mummer +description: MUMmer is a system for rapidly aligning entire genomes +keywords: + - align + - genome + - fasta +tools: + - mummer: + description: MUMmer is a system for rapidly aligning entire genomes + homepage: http://mummer.sourceforge.net/ + documentation: http://mummer.sourceforge.net/ + tool_dev_url: http://mummer.sourceforge.net/ + doi: https://doi.org/10.1186/gb-2004-5-2-r12 + licence: ['The Artistic License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ref: + type: file + description: FASTA file of the reference sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - query: + type: file + description: FASTA file of the query sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - coords: + type: file + description: File containing coordinates of matches between reference and query sequence + pattern: "*.coords" + +authors: + - "@mjcipriano" + - "@sateeshperi" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 34085bcd..d459d330 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -867,6 +867,10 @@ multiqc: - modules/multiqc/** - tests/modules/multiqc/** +mummer: + - modules/mummer/** + - tests/modules/mummer/** + muscle: - modules/muscle/** - tests/modules/muscle/** diff --git a/tests/modules/mummer/main.nf b/tests/modules/mummer/main.nf new file mode 100644 index 00000000..b24f8b16 --- /dev/null +++ b/tests/modules/mummer/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MUMMER } from '../../../modules/mummer/main.nf' addParams( options: [:] ) + +workflow test_mummer { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + MUMMER ( input ) +} diff --git a/tests/modules/mummer/test.yml b/tests/modules/mummer/test.yml new file mode 100644 index 00000000..1d368d14 --- /dev/null +++ b/tests/modules/mummer/test.yml @@ -0,0 +1,7 @@ +- name: mummer test_mummer + command: nextflow run tests/modules/mummer -entry test_mummer -c tests/config/nextflow.config + tags: + - mummer + files: + - path: output/mummer/test.coords + md5sum: 6084fe43c7cb2eca8b96d674560bdefc From 4a9bfec61dca9e99b884a2577e273056ff92230c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Mon, 15 Nov 2021 15:57:58 +0000 Subject: [PATCH 241/314] New module: `Ultra` (#871) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Update .gitignore * 📦 Add ultra module * 👌 IMPROVE: Update test input * 👌 IMPROVE: Update and clean code - Update to last versions.yml file - Update meta.yml - Correct typos * 👌 IMPROVE: Update output channels + Rename following subtool * 👌 IMPROVE: Remove old ultre files * 👌 IMPROVE: Update of pytest_modules.yml * 👌 IMPROVE: Update test.yml * 👌 IMPROVE: Keep md5sum as much as possible * 👌 IMPROVE: Remove old ultra files * 👌 IMPROVE: Update of pytest_modules.yml * 👌 IMPROVE: Update test.yml * 👌 IMPROVE: Keep md5sum as much as possible * 🐛 Fix: add unsaved modifications * 🐛 FIX: Remove one inconstant md5sum * 🐛 FIX: Grab software name using ${getSoftwareName(task.process)} * 🐛 FIX: Remove md5sums for pickle files (not constant). * Update modules/ultra/pipeline/main.nf Co-authored-by: Harshil Patel * Update modules/ultra/pipeline/main.nf Co-authored-by: Harshil Patel * 👌 IMPROVE: update output directory, update meta.yml * 👌 IMPROVE: Use modules to gunzip and sort gtf * 🐛 FIX: Set up channel correctly * 👌 IMPROVE: Remove pickles files and databases Those data might be useful in a debugging purpose. * Apply suggestions from code review * Update main.nf Co-authored-by: Harshil Patel --- modules/ultra/pipeline/functions.nf | 78 +++++++++++++++++++++++++++ modules/ultra/pipeline/main.nf | 48 +++++++++++++++++ modules/ultra/pipeline/meta.yml | 50 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ultra/pipeline/main.nf | 25 +++++++++ tests/modules/ultra/pipeline/test.yml | 12 +++++ 6 files changed, 217 insertions(+) create mode 100644 modules/ultra/pipeline/functions.nf create mode 100644 modules/ultra/pipeline/main.nf create mode 100644 modules/ultra/pipeline/meta.yml create mode 100644 tests/modules/ultra/pipeline/main.nf create mode 100644 tests/modules/ultra/pipeline/test.yml diff --git a/modules/ultra/pipeline/functions.nf b/modules/ultra/pipeline/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ultra/pipeline/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish 
versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf new file mode 100644 index 00000000..5a5c2c3e --- /dev/null +++ b/modules/ultra/pipeline/main.nf @@ -0,0 +1,48 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ULTRA_PIPELINE { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4--pyh5e36f6f_1" + } else { + container "quay.io/biocontainers/ultra_bioinformatics:0.0.4--pyh5e36f6f_1" + } + + input: + tuple val(meta), path(reads) + path genome + path gtf + + output: + tuple val(meta), path("*.sam"), emit: sam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + uLTRA \\ + pipeline \\ + --t $task.cpus \\ + --prefix $prefix \\ + $options.args \\ + \$(pwd)/$genome \\ + \$(pwd)/$gtf \\ + \$(pwd)/$reads \\ + ./ + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( uLTRA --version|sed 's/uLTRA //g' ) + END_VERSIONS + """ +} diff --git a/modules/ultra/pipeline/meta.yml b/modules/ultra/pipeline/meta.yml new file mode 100644 index 00000000..d0008cfc --- /dev/null +++ b/modules/ultra/pipeline/meta.yml @@ -0,0 +1,50 @@ +name: ultra_pipeline +description: uLTRA aligner - A wrapper around minimap2 to improve small exon detection +keywords: + - uLTRA + - minimap2 +tools: + - ultra: + description: Splice aligner of long transcriptomic reads to genome. + homepage: https://github.com/ksahlin/uLTRA + documentation: https://github.com/ksahlin/uLTRA + tool_dev_url: https://github.com/ksahlin/uLTRA + doi: "10.1093/bioinformatics/btab540" + licence: ['GNU GPLV3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - genome: + type: file + description: fasta file of reference genome + pattern: "*.fasta" + - gtf: + type: file + description: Annotation of the genome in GTF format + pattern: "*.gtf" + - reads: + type: file + description: A fasta or fastq file of reads to align + pattern: "*.{fasta,fastq}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - sam: + type: file + description: The aligned reads in sam format + pattern: "*.sam" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d459d330..794c7f4d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1311,6 +1311,10 @@ ucsc/wigtobigwig: - modules/ucsc/wigtobigwig/** - tests/modules/ucsc/wigtobigwig/** +ultra/pipeline: + - modules/ultra/pipeline/** + - tests/modules/ultra/pipeline/** + unicycler: - modules/unicycler/** - tests/modules/unicycler/** diff --git a/tests/modules/ultra/pipeline/main.nf b/tests/modules/ultra/pipeline/main.nf new file mode 100644 index 00000000..881fe9a7 --- /dev/null +++ b/tests/modules/ultra/pipeline/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ULTRA_PIPELINE } from '../../../../modules/ultra/pipeline/main.nf' addParams( options: [:] ) +include { GUNZIP } from '../../../../modules/gunzip/main.nf' addParams( options: [:] ) +include { GFFREAD } from '../../../../modules/gffread/main.nf' addParams( options: [args: "--sort-alpha --keep-genes -T", suffix: "_sorted"] ) + +workflow test_ultra_pipeline { + + fastq = file(params.test_data['homo_sapiens']['pacbio']['hifi'] , checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'] , checkIfExists: true) + genome = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + GUNZIP ( fastq ) + GFFREAD ( gtf ) + + GUNZIP + .out + .gunzip + .map { [ [ id:'test', single_end:false ], it ] } + .set { input } + + ULTRA_PIPELINE ( input, genome, GFFREAD.out.gtf ) +} diff --git a/tests/modules/ultra/pipeline/test.yml b/tests/modules/ultra/pipeline/test.yml new file mode 100644 index 00000000..fa378e58 --- /dev/null +++ b/tests/modules/ultra/pipeline/test.yml @@ -0,0 +1,12 @@ +- name: ultra pipeline test_ultra_pipeline + command: nextflow run tests/modules/ultra/pipeline -entry test_ultra_pipeline -c tests/config/nextflow.config + tags: + - ultra/pipeline + - ultra + files: + - path: output/gffread/genome_sorted.gtf + md5sum: c0b034860c679a354cd093109ed90437 + - path: output/gunzip/test_hifi.fastq + md5sum: 20e41c569d5828c1e87337e13a5185d3 + - path: output/ultra/test.sam + md5sum: a37a1f9594a3099522dc1f6a903b2b12 From f93c2f2604e98e44e6fd5110751f80cbcfb2d610 Mon Sep 17 00:00:00 2001 From: mjakobs <25904555+mjakobs@users.noreply.github.com> Date: Mon, 15 Nov 2021 16:01:46 +0000 Subject: [PATCH 242/314] add Krona_db module (#995) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add Krona_db module * removed md5 sum * Update tests/modules/kronatools/kronadb/main.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * added input information * removed trailing white spaces * changed krona version to 2.7.1 * Apply suggestions from code review * Update modules/kronatools/kronadb/meta.yml * Update 
modules/kronatools/kronadb/main.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> Co-authored-by: Sébastien Guizard Co-authored-by: Harshil Patel Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> --- modules/kronatools/kronadb/functions.nf | 78 +++++++++++++++++++++++ modules/kronatools/kronadb/main.nf | 35 ++++++++++ modules/kronatools/kronadb/meta.yml | 30 +++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/kronatools/kronadb/main.nf | 9 +++ tests/modules/kronatools/kronadb/test.yml | 7 ++ 6 files changed, 163 insertions(+) create mode 100644 modules/kronatools/kronadb/functions.nf create mode 100644 modules/kronatools/kronadb/main.nf create mode 100644 modules/kronatools/kronadb/meta.yml create mode 100644 tests/modules/kronatools/kronadb/main.nf create mode 100644 tests/modules/kronatools/kronadb/test.yml diff --git a/modules/kronatools/kronadb/functions.nf b/modules/kronatools/kronadb/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/kronatools/kronadb/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/kronatools/kronadb/main.nf b/modules/kronatools/kronadb/main.nf new file mode 100644 index 00000000..7dee12d0 --- /dev/null +++ b/modules/kronatools/kronadb/main.nf @@ -0,0 +1,35 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process KRONATOOLS_KRONADB { + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::krona=2.7.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/krona:2.7.1--pl526_5" + } else { + container "quay.io/biocontainers/krona:2.7.1--pl526_5" + } + input: + + output: + path 'taxonomy/taxonomy.tab', emit: db + path "versions.yml" , emit: versions + + script: + def VERSION='2.7.1' + """ + ktUpdateTaxonomy.sh taxonomy + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: $VERSION + END_VERSIONS + """ +} diff --git a/modules/kronatools/kronadb/meta.yml b/modules/kronatools/kronadb/meta.yml new file mode 100644 index 00000000..5a637949 --- /dev/null +++ b/modules/kronatools/kronadb/meta.yml @@ -0,0 +1,30 @@ +name: kronatools_kronadb +description: KronaTools Update Taxonomy downloads a taxonomy database +keywords: + - database + - taxonomy + - krona +tools: + - kronatools: + description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. + homepage: https://github.com/marbl/Krona/wiki/KronaTools + documentation: https://github.com/marbl/Krona/wiki/Installing + tool_dev_url: + doi: https://doi.org/10.1186/1471-2105-12-385 + licence: + +input: + - none: There is no input. This module downloads a pre-built taxonomy database for use with Krona Tools. + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - db: + type: file + description: A TAB separated file that contains a taxonomy database. 
+ pattern: "*.{tab}" + +authors: + - "@mjakobs" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 794c7f4d..fb239baf 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -702,6 +702,10 @@ kraken2/kraken2: - modules/untar/** - tests/modules/kraken2/kraken2/** +kronatools/kronadb: + - modules/kronatools/kronadb/** + - tests/modules/kronatools/kronadb/** + last/dotplot: - modules/last/dotplot/** - tests/modules/last/dotplot/** diff --git a/tests/modules/kronatools/kronadb/main.nf b/tests/modules/kronatools/kronadb/main.nf new file mode 100644 index 00000000..90b6e30c --- /dev/null +++ b/tests/modules/kronatools/kronadb/main.nf @@ -0,0 +1,9 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONATOOLS_KRONADB } from '../../../../modules/kronatools/kronadb/main.nf' addParams( options: [:] ) + +workflow test_kronatools_kronadb { + KRONATOOLS_KRONADB ( ) +} diff --git a/tests/modules/kronatools/kronadb/test.yml b/tests/modules/kronatools/kronadb/test.yml new file mode 100644 index 00000000..3f346a9d --- /dev/null +++ b/tests/modules/kronatools/kronadb/test.yml @@ -0,0 +1,7 @@ +- name: kronatools kronadb test_kronatools_kronadb + command: nextflow run tests/modules/kronatools/kronadb -entry test_kronatools_kronadb -c tests/config/nextflow.config + tags: + - kronatools + - kronatools/kronadb + files: + - path: output/kronatools/taxonomy/taxonomy.tab From 7ad42eae1b24e95f426a03fe544f0fcfe2912cf2 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 11:18:02 -0500 Subject: [PATCH 243/314] Cooler zoomify (#514) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add software/cooler * fix the wrong files uploaded. * create a branch for cooler/zoomify * Apply suggestions from code review * update functions.nf to new version. * update the test file to test-datasets. * update the test method of zoomify * update dump test file. * update version.txt to version.yml * Update modules/cooler/dump/main.nf Co-authored-by: Harshil Patel * fix the output bug of versions update to pytest_modules.yml * update the test file path and fix the output versions. * Update modules/cooler/dump/main.nf * indent Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm Co-authored-by: Sébastien Guizard Co-authored-by: FriederikeHanssen --- modules/cooler/dump/main.nf | 6 ++- modules/cooler/zoomify/functions.nf | 78 +++++++++++++++++++++++++++ modules/cooler/zoomify/main.nf | 42 +++++++++++++++ modules/cooler/zoomify/meta.yml | 41 ++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 1 + tests/modules/cooler/dump/main.nf | 2 +- tests/modules/cooler/zoomify/main.nf | 14 +++++ tests/modules/cooler/zoomify/test.yml | 8 +++ 9 files changed, 193 insertions(+), 3 deletions(-) create mode 100644 modules/cooler/zoomify/functions.nf create mode 100644 modules/cooler/zoomify/main.nf create mode 100644 modules/cooler/zoomify/meta.yml create mode 100644 tests/modules/cooler/zoomify/main.nf create mode 100644 tests/modules/cooler/zoomify/test.yml diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index 2028f5f0..1ca11c7d 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -20,18 +20,20 @@ process COOLER_DUMP { input: tuple val(meta), path(cool) + val resolution output: tuple val(meta), path("*.bedpe"), emit: bedpe - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def suffix = resolution ? "::$resolution" : "" """ cooler dump \\ $options.args \\ -o ${prefix}.bedpe \\ - $cool + $cool$suffix cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/modules/cooler/zoomify/functions.nf b/modules/cooler/zoomify/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cooler/zoomify/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cooler/zoomify/main.nf b/modules/cooler/zoomify/main.nf new file mode 100644 index 00000000..3f1ed4e7 --- /dev/null +++ b/modules/cooler/zoomify/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process COOLER_ZOOMIFY { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" + } else { + container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" + } + + input: + tuple val(meta), path(cool) + + output: + tuple val(meta), path("*.mcool"), emit: mcool + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + cooler zoomify \\ + $options.args \\ + -n $task.cpus \\ + -o ${prefix}.mcool \\ + $cool + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/zoomify/meta.yml b/modules/cooler/zoomify/meta.yml new file mode 100644 index 00000000..74bdbf44 --- /dev/null +++ b/modules/cooler/zoomify/meta.yml @@ -0,0 +1,41 @@ +name: cooler_zoomify +description: Generate a multi-resolution cooler file by coarsening +keywords: + - mcool +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cool: + type: file + description: Path to COOL file + pattern: "*.{cool,mcool}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - mcool: + type: file + description: Output mcool file + pattern: "*.mcool" + +authors: + - "@jianhong" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index fb239baf..86e4fe22 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -294,6 +294,10 @@ cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** +cooler/zoomify: + - modules/cooler/zoomify/** + - tests/modules/cooler/zoomify/** + cooler/merge: - modules/cooler/merge/** - tests/modules/cooler/merge/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 7538046d..46232ef9 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -260,6 +260,7 @@ params { 'cooler' { test_merge_cool = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cool" test_merge_cool_cp2 = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cp2.cool" + } } 'bacteroides_fragilis'{ diff --git a/tests/modules/cooler/dump/main.nf b/tests/modules/cooler/dump/main.nf index e2a647c5..deeeb21f 100644 --- a/tests/modules/cooler/dump/main.nf +++ b/tests/modules/cooler/dump/main.nf @@ -9,5 +9,5 @@ workflow test_cooler_dump { input = [ [ id:'test' ], // meta map file("https://raw.githubusercontent.com/open2c/cooler/master/tests/data/toy.asymm.16.cool", checkIfExists: true) ] - COOLER_DUMP ( input ) + COOLER_DUMP ( input, [:] ) } diff --git a/tests/modules/cooler/zoomify/main.nf b/tests/modules/cooler/zoomify/main.nf new file mode 100644 index 00000000..72c33983 --- /dev/null +++ b/tests/modules/cooler/zoomify/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_ZOOMIFY } from '../../../../modules/cooler/zoomify/main.nf' addParams( options: ['args':'-r 2,4,8', publish_files:[:]] ) +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) + +workflow test_cooler_zoomify { + input = [ [ id:'test' ], // meta map + file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true)] + + COOLER_ZOOMIFY ( input ) + COOLER_DUMP(COOLER_ZOOMIFY.out.mcool, "/resolutions/2") +} diff --git a/tests/modules/cooler/zoomify/test.yml b/tests/modules/cooler/zoomify/test.yml new file mode 100644 index 00000000..79a5af2c --- /dev/null +++ b/tests/modules/cooler/zoomify/test.yml @@ -0,0 +1,8 @@ +- name: cooler zoomify test_cooler_zoomify + command: nextflow run tests/modules/cooler/zoomify -entry test_cooler_zoomify -c tests/config/nextflow.config + tags: + - cooler + - cooler/zoomify + files: + - path: output/cooler/test.bedpe + md5sum: 8d792beb609fff62b536c326661f9507 From eff515891de1673eb54c540b4966c479b28a7e7b Mon Sep 17 00:00:00 2001 From: louperelo <44900284+louperelo@users.noreply.github.com> Date: Mon, 15 Nov 2021 17:42:48 +0100 Subject: [PATCH 244/314] new module fargene (#1068) * new module fargene * Update main.nf * Update modules/fargene/main.nf * Update main.nf Co-authored-by: Harshil Patel --- modules/fargene/functions.nf | 78 ++++++++++++++++++++++++ modules/fargene/main.nf | 63 ++++++++++++++++++++ modules/fargene/meta.yml | 101 ++++++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/fargene/main.nf | 14 +++++ tests/modules/fargene/test.yml | 12 ++++ 6 files changed, 272 insertions(+) create mode 100644 modules/fargene/functions.nf create mode 
100644 modules/fargene/main.nf create mode 100644 modules/fargene/meta.yml create mode 100644 tests/modules/fargene/main.nf create mode 100644 tests/modules/fargene/test.yml diff --git a/modules/fargene/functions.nf b/modules/fargene/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/fargene/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf new file mode 100644 index 00000000..f2afe4be --- /dev/null +++ b/modules/fargene/main.nf @@ -0,0 +1,63 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '0.1' + +process FARGENE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::fargene=0.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/fargene:0.1--py27h21c881e_4" + } else { + container "quay.io/biocontainers/fargene:0.1--py27h21c881e_4" + } + + input: + // input may be fasta (for genomes or longer contigs) or paired-end fastq (for metagenome), the latter in addition with --meta flag + tuple val(meta), path(input) + val hmm_model + + output: + path "*.log" , emit: log + path "${prefix}/results_summary.txt" , emit: txt + tuple val(meta), path("${prefix}/hmmsearchresults/*.out") , optional: true, emit: hmm + tuple val(meta), path("${prefix}/predictedGenes/predicted-orfs.fasta") , optional: true, emit: orfs + tuple val(meta), path("${prefix}/predictedGenes/predicted-orfs-amino.fasta") , optional: true, emit: orfs_amino + tuple val(meta), path("${prefix}/predictedGenes/retrieved-contigs.fasta") , optional: true, emit: contigs + tuple val(meta), path("${prefix}/predictedGenes/retrieved-contigs-peptides.fasta") , optional: true, emit: contigs_pept + tuple val(meta), path("${prefix}/predictedGenes/*filtered.fasta") , optional: true, emit: filtered + tuple val(meta), path("${prefix}/predictedGenes/*filtered-peptides.fasta") , optional: true, emit: filtered_pept + tuple val(meta), path("${prefix}/retrievedFragments/all_retrieved_*.fastq") , optional: true, emit: fragments + tuple val(meta), path("${prefix}/retrievedFragments/retrievedFragments/trimmedReads/*.fasta"), optional: true, emit: trimmed + tuple val(meta), path("${prefix}/spades_assembly/*") , optional: true, emit: spades + tuple val(meta), path("${prefix}/tmpdir/*.fasta") , optional: true, emit: metagenome + tuple val(meta), path("${prefix}/tmpdir/*.out") , optional: true, emit: tmp + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + gzip \\ + -cdf $input \\ + > unziped.fa | + fargene \\ + $options.args \\ + -p $task.cpus \\ + -i unziped.fa \\ + --hmm-model $hmm_model \\ + -o $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS + """ +} diff --git a/modules/fargene/meta.yml b/modules/fargene/meta.yml new file mode 100644 index 00000000..98ec12bb --- /dev/null +++ b/modules/fargene/meta.yml @@ -0,0 +1,101 @@ +name: fargene +description: tool that takes either fragmented metagenomic data or longer sequences as input and predicts and delivers full-length antiobiotic resistance genes as output. +keywords: + - antibiotic resistance genes + - ARGs + - identifier + - metagenomic + - contigs +tools: + - fargene: + description: Fragmented Antibiotic Resistance Gene Identifier takes either fragmented metagenomic data or longer sequences as input and predicts and delivers full-length antiobiotic resistance genes as output + homepage: https://github.com/fannyhb/fargene + documentation: https://github.com/fannyhb/fargene + tool_dev_url: https://github.com/fannyhb/fargene + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - input: + type: file + description: fasta or paired-end fastq file containing either genomes or longer contigs as nucleotide or protein sequences (fasta) or fragmented metagenomic reads (fastq) + pattern: "*.{fasta}" + - hmm_model: + type: string + description: name of custom hidden markov model to be used [pre-defined class_a, class_b_1_2, class_b_3, class_c, class_d_1, class_d_2, qnr, tet_efflux, tet_rpg, tet_enzyme] + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - log: + type: file + description: log file + pattern: "*.{log}" + - txt: + type: file + description: analysis summary text file + pattern: "*.{txt}" + - hmm: + type: file + description: output from hmmsearch + pattern: "*.{out}" + - orfs: + type: file + description: open reading frames (ORFs) + pattern: "*.{fasta}" + - orfs_amino: + type: file + description: protein translation of open reading frames (ORFs) + pattern: "*.{fasta}" + - contigs: + type: file + description: (complete) contigs that passed the final full-length classification + pattern: "*.{fasta}" + - contigs_pept: + type: file + description: parts of the contigs that passed the final classification step that aligned with the HMM, as amino acid sequences + pattern: "*.{fasta}" + - filtered: + type: file + description: sequences that passed the final classification step, but only the parts that where predicted by the HMM to be part of the gene + pattern: "*.{fasta}" + - filtered_pept: + type: file + description: sequences from filtered.fasta, translated in the same frame as the gene is predicted to be located + pattern: "*.{fasta}" + - fragments: + type: file + description: All quality controlled retrieved fragments that were classified as positive, together with its read-pair, gathered in two files + pattern: "*.{fastq}" + - trimmed: + type: file + description: The quality controlled retrieved fragments from each input file. + pattern: "*.{fasta}" + - spades: + type: directory + description: The output from the SPAdes assembly + pattern: "spades_assembly" + - metagenome: + type: file + description: The FASTQ to FASTA converted input files from metagenomic reads. + pattern: "*.{fasta}" + - tmp: + type: file + description: The from FASTQ to FASTA converted input files and their translated input sequences. Are only saved if option --store-peptides is used. 
+ pattern: "*.{fasta}" + + +authors: + - "@louperelo" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 86e4fe22..d05d6155 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -390,6 +390,10 @@ expansionhunter: - modules/expansionhunter/** - tests/modules/expansionhunter/** +fargene: + - modules/fargene/** + - tests/modules/fargene/** + fastani: - modules/fastani/** - tests/modules/fastani/** diff --git a/tests/modules/fargene/main.nf b/tests/modules/fargene/main.nf new file mode 100644 index 00000000..f89392ff --- /dev/null +++ b/tests/modules/fargene/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FARGENE } from '../../../modules/fargene/main.nf' addParams( options: [:] ) + +workflow test_fargene { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true) ] + hmm_model = 'class_a' + + FARGENE ( input, hmm_model ) +} diff --git a/tests/modules/fargene/test.yml b/tests/modules/fargene/test.yml new file mode 100644 index 00000000..3db6699c --- /dev/null +++ b/tests/modules/fargene/test.yml @@ -0,0 +1,12 @@ +- name: fargene + command: nextflow run tests/modules/fargene -entry test_fargene -c tests/config/nextflow.config + tags: + - fargene + files: + - path: output/fargene/fargene_analysis.log + - path: output/fargene/test/hmmsearchresults/unziped-class_A-hmmsearched.out + - path: output/fargene/test/results_summary.txt + md5sum: 690d351cfc52577263ef4cfab1c81f50 + - path: output/fargene/test/tmpdir/tmp.out + - path: output/fargene/test/tmpdir/unziped-positives.out + md5sum: d41d8cd98f00b204e9800998ecf8427e From 632587a7fcf6c1d7d71b21560f60f1d4802e5d0e Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 15 Nov 2021 17:44:12 +0100 Subject: [PATCH 245/314] Add `bamutil/trimbam` (#1060) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Add bamUtil trimBam * Update modules/bamutil/trimbam/main.nf Co-authored-by: Harshil Patel * Update modules/bamutil/trimbam/main.nf * Changes after code-review * YAML lint Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry --- modules/bamutil/trimbam/functions.nf | 78 ++++++++++++++++++++++++++ modules/bamutil/trimbam/main.nf | 44 +++++++++++++++ modules/bamutil/trimbam/meta.yml | 51 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bamutil/trimbam/main.nf | 15 +++++ tests/modules/bamutil/trimbam/test.yml | 8 +++ 6 files changed, 200 insertions(+) create mode 100644 modules/bamutil/trimbam/functions.nf create mode 100644 modules/bamutil/trimbam/main.nf create mode 100644 modules/bamutil/trimbam/meta.yml create mode 100644 tests/modules/bamutil/trimbam/main.nf create mode 100644 tests/modules/bamutil/trimbam/test.yml diff --git a/modules/bamutil/trimbam/functions.nf b/modules/bamutil/trimbam/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/bamutil/trimbam/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof 
List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/bamutil/trimbam/main.nf b/modules/bamutil/trimbam/main.nf new file mode 100644 index 00000000..60949338 --- /dev/null +++ b/modules/bamutil/trimbam/main.nf @@ -0,0 +1,44 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BAMUTIL_TRIMBAM { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bamutil=1.0.15" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bamutil:1.0.15--h2e03b76_1" + } else { + container "quay.io/biocontainers/bamutil:1.0.15--h2e03b76_1" + } + + input: + tuple val(meta), path(bam), val(trim_left), val(trim_right) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + bam \\ + trimBam \\ + $bam \\ + ${prefix}.bam \\ + $options.args \\ + -L $trim_left \\ + -R $trim_right + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$( bam trimBam 2>&1 ) | sed 's/^Version: //;s/;.*//' ) + END_VERSIONS + """ +} diff --git a/modules/bamutil/trimbam/meta.yml b/modules/bamutil/trimbam/meta.yml new file mode 100644 index 00000000..a91ba0e1 --- /dev/null +++ b/modules/bamutil/trimbam/meta.yml @@ -0,0 +1,51 @@ +name: bamutil_trimbam +description: trims the end of reads in a SAM/BAM file, changing read ends to ‘N’ and quality to ‘!’, or by soft clipping +keywords: + - bam + - trim + - clipping + - bamUtil + - trimBam +tools: + - bamutil: + description: Programs that perform operations on SAM/BAM files, all built into a single executable, bam. + homepage: https://genome.sph.umich.edu/wiki/BamUtil + documentation: https://genome.sph.umich.edu/wiki/BamUtil:_trimBam + tool_dev_url: https://github.com/statgen/bamUtil + doi: "10.1101/gr.176552.114" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - trim_left: + type: integer + description: Number of bases to trim off the left-hand side of a read. Reverse strands are reversed before trimming. + - trim_right: + type: integer + description: Number of bases to trim off the right-hand side of a read. Reverse strands are reversed before trimming. 
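A short usage sketch may be helpful here (illustrative only, not part of the patch; the include path and BAM filename are hypothetical). It builds the [ meta, bam, trim_left, trim_right ] tuple the module expects and trims the two read ends by different amounts:

    include { BAMUTIL_TRIMBAM } from './modules/bamutil/trimbam/main.nf' addParams( options: [:] )

    workflow sketch_bamutil_trimbam_usage {

        // Trim 5 bases from the left and 10 from the right of every read;
        // reverse-strand reads are reversed before trimming, as documented above.
        ch_bam = Channel.of([ [ id:'sample1', single_end:false ], // meta map
                              file('sample1.bam'),                // hypothetical BAM
                              5,
                              10 ])

        BAMUTIL_TRIMBAM ( ch_bam )
    }

Soft clipping instead of 'N'/'!' masking is handled by the tool's own options, which could be passed through options.args without changing the -L/-R values supplied in the tuple.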
+ +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Trimmed but unsorted BAM file + pattern: "*.bam" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d05d6155..13ef5868 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -46,6 +46,10 @@ bamtools/split: - modules/bamtools/split/** - tests/modules/bamtools/split/** +bamutil/trimbam: + - modules/bamutil/trimbam/** + - tests/modules/bamutil/trimbam/** + bandage/image: - modules/bandage/image/** - tests/modules/bandage/image/** diff --git a/tests/modules/bamutil/trimbam/main.nf b/tests/modules/bamutil/trimbam/main.nf new file mode 100644 index 00000000..3699756c --- /dev/null +++ b/tests/modules/bamutil/trimbam/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAMUTIL_TRIMBAM } from '../../../../modules/bamutil/trimbam/main.nf' addParams( options: [:] ) + +workflow test_bamutil_trimbam { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + 2, + 2 ] + + BAMUTIL_TRIMBAM ( input ) +} diff --git a/tests/modules/bamutil/trimbam/test.yml b/tests/modules/bamutil/trimbam/test.yml new file mode 100644 index 00000000..95ddc3b3 --- /dev/null +++ b/tests/modules/bamutil/trimbam/test.yml @@ -0,0 +1,8 @@ +- name: bamutil trimbam test_bamutil_trimbam + command: nextflow run tests/modules/bamutil/trimbam -entry test_bamutil_trimbam -c tests/config/nextflow.config + tags: + - bamutil/trimbam + - bamutil + files: + - path: output/bamutil/test.bam + md5sum: 9ddd0ecca82f7f3433383f3d1308970e From 5b1ce484b98bc8a5217c352f293543febdffcca4 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Mon, 15 Nov 2021 17:49:20 +0100 Subject: [PATCH 246/314] feat: _idx -> _tbi (#1074) Co-authored-by: FriederikeHanssen --- modules/gatk4/getpileupsummaries/main.nf | 2 +- modules/gatk4/getpileupsummaries/meta.yml | 2 +- modules/gatk4/mutect2/main.nf | 4 +-- modules/gatk4/mutect2/meta.yml | 8 ++--- .../modules/gatk4/getpileupsummaries/main.nf | 8 ++--- tests/modules/gatk4/mutect2/main.nf | 30 +++++++++---------- 6 files changed, 27 insertions(+), 27 deletions(-) diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 7919678c..f08d4d91 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -21,7 +21,7 @@ process GATK4_GETPILEUPSUMMARIES { input: tuple val(meta), path(bam), path(bai) path variants - path variants_idx + path variants_tbi path sites output: diff --git a/modules/gatk4/getpileupsummaries/meta.yml b/modules/gatk4/getpileupsummaries/meta.yml index 70158a8d..0add299b 100644 --- a/modules/gatk4/getpileupsummaries/meta.yml +++ b/modules/gatk4/getpileupsummaries/meta.yml @@ -35,7 +35,7 @@ input: type: file description: Population vcf of germline sequencing, containing allele fractions. Is also used as sites file if no separate sites file is specified. pattern: "*.vcf.gz" - - variants_idx: + - variants_tbi: type: file description: Index file for the germline resource. 
pattern: "*.vcf.gz.tbi" diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 748b1673..dd8da406 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -28,9 +28,9 @@ process GATK4_MUTECT2 { path fastaidx path dict path germline_resource - path germline_resource_idx + path germline_resource_tbi path panel_of_normals - path panel_of_normals_idx + path panel_of_normals_tbi output: tuple val(meta), path("*.vcf.gz") , emit: vcf diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 44601e41..4a49b07a 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -66,18 +66,18 @@ input: type: file description: Population vcf of germline sequencing, containing allele fractions. pattern: "*.vcf.gz" - - germline_resource_idx: + - germline_resource_tbi: type: file description: Index file for the germline resource. - pattern: "*.vcf.gz_tbi" + pattern: "*.vcf.gz.tbi" - panel_of_normals: type: file description: vcf file to be used as a panel of normals. pattern: "*.vcf.gz" - - panel_of_normals_idx: + - panel_of_normals_tbi: type: file description: Index for the panel of normals. - pattern: "*.vcf.gz_tbi" + pattern: "*.vcf.gz.tbi" output: - vcf: diff --git a/tests/modules/gatk4/getpileupsummaries/main.nf b/tests/modules/gatk4/getpileupsummaries/main.nf index 0c7d3fb6..66ee4990 100644 --- a/tests/modules/gatk4/getpileupsummaries/main.nf +++ b/tests/modules/gatk4/getpileupsummaries/main.nf @@ -11,10 +11,10 @@ workflow test_gatk4_getpileupsummaries_just_variants { file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) ] variants = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - variants_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + variants_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) sites = [] - GATK4_GETPILEUPSUMMARIES ( input , variants , variants_idx , sites ) + GATK4_GETPILEUPSUMMARIES ( input , variants , variants_tbi , sites ) } workflow test_gatk4_getpileupsummaries_separate_sites { @@ -24,8 +24,8 @@ workflow test_gatk4_getpileupsummaries_separate_sites { file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) ] variants = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - variants_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + variants_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) sites = file( "https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/genome.interval_list" , checkIfExists: true) - GATK4_GETPILEUPSUMMARIES ( input , variants , variants_idx , sites ) + GATK4_GETPILEUPSUMMARIES ( input , variants , variants_tbi , sites ) } diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index 293739e4..a3821b64 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -20,11 +20,11 @@ workflow test_gatk4_mutect2_tumor_normal_pair { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) 
germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - germline_resource_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) - panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_TEMPFIX_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_TEMPFIX_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } workflow test_gatk4_mutect2_tumor_single { @@ -41,11 +41,11 @@ workflow test_gatk4_mutect2_tumor_single { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - germline_resource_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) - panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } workflow test_gatk4_mutect2_cram_input { @@ -62,11 +62,11 @@ workflow test_gatk4_mutect2_cram_input { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - germline_resource_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) - panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals_tbi = 
file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } workflow test_gatk4_mutect2_generate_pon { @@ -83,11 +83,11 @@ workflow test_gatk4_mutect2_generate_pon { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = [] - germline_resource_idx = [] + germline_resource_tbi = [] panel_of_normals = [] - panel_of_normals_idx = [] + panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } // mitochondria mode would ideally have some mitochondria test data, but since the mitochondria settings only increase detection sensitivity, we can use the chr22 data as a stand in as it is already a small dataset, the extra variants detected compared to generate_pon shows the mode is working. @@ -105,9 +105,9 @@ workflow test_gatk4_mutect2_mitochondria { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = [] - germline_resource_idx = [] + germline_resource_tbi = [] panel_of_normals = [] - panel_of_normals_idx = [] + panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } From 34268347447cd60013b69279a42aa9d081592735 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Mon, 15 Nov 2021 18:03:02 +0100 Subject: [PATCH 247/314] feat: fastaidx -> fai (#1073) --- modules/gatk4/applybqsr/main.nf | 2 +- modules/gatk4/applybqsr/meta.yml | 5 +++- modules/gatk4/baserecalibrator/main.nf | 2 +- modules/gatk4/baserecalibrator/meta.yml | 5 +++- .../gatk4/createsomaticpanelofnormals/main.nf | 2 +- .../createsomaticpanelofnormals/meta.yml | 4 ++-- modules/gatk4/filtermutectcalls/main.nf | 2 +- modules/gatk4/filtermutectcalls/meta.yml | 4 ++-- modules/gatk4/mutect2/main.nf | 2 +- modules/gatk4/mutect2/meta.yml | 4 ++-- .../nf-core/gatk_create_som_pon/main.nf | 6 ++--- .../nf-core/gatk_create_som_pon/meta.yml | 4 ++-- tests/modules/gatk4/filtermutectcalls/main.nf | 18 +++++++------- tests/modules/gatk4/mutect2/main.nf | 24 +++++++++---------- .../nf-core/gatk_create_som_pon/main.nf | 5 ++-- 15 files changed, 47 insertions(+), 42 deletions(-) diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index e1a4d7b4..c89a4a4d 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -21,7 +21,7 @@ process GATK4_APPLYBQSR { input: tuple val(meta), path(input), path(input_index), path(bqsr_table) path fasta - path fastaidx + path fai path dict path intervals diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index e7419860..4e3b2f9a 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -34,12 +34,15 @@ input: - fasta: type: file description: The reference fasta file - - fastaidx: + pattern: "*.fasta" + - fai: type: file description: Index of reference fasta file + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary + pattern: "*.dict" - intervalsBed: type: file description: Bed file with the genomic regions included in the library (optional) diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index ff9eb1f9..ce6f5906 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -21,7 +21,7 @@ process GATK4_BASERECALIBRATOR { input: tuple val(meta), path(input), path(input_index) path fasta - path fastaidx + path fai path dict path intervalsBed path knownSites diff --git a/modules/gatk4/baserecalibrator/meta.yml b/modules/gatk4/baserecalibrator/meta.yml index 7fd273e1..188340b4 100644 --- a/modules/gatk4/baserecalibrator/meta.yml +++ b/modules/gatk4/baserecalibrator/meta.yml @@ -31,12 +31,15 @@ input: - fasta: type: file description: The reference fasta file - - fastaidx: + pattern: "*.fasta" + - fai: type: file description: Index of reference fasta file + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary + pattern: "*.dict" - intervalsBed: type: file description: Bed file with the genomic regions included in the library (optional) diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index b3685171..49136256 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -21,7 +21,7 @@ process GATK4_CREATESOMATICPANELOFNORMALS { input: tuple val(meta), path(genomicsdb) path fasta - path fastaidx + path fai path dict output: diff --git a/modules/gatk4/createsomaticpanelofnormals/meta.yml b/modules/gatk4/createsomaticpanelofnormals/meta.yml index f0199ed6..e450c68a 100644 --- a/modules/gatk4/createsomaticpanelofnormals/meta.yml +++ b/modules/gatk4/createsomaticpanelofnormals/meta.yml @@ -28,10 +28,10 @@ 
input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index b54e07ed..6e10ff0f 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -21,7 +21,7 @@ process GATK4_FILTERMUTECTCALLS { input: tuple val(meta), path(vcf), path(tbi), path(stats), path(orientationbias), path(segmentation), path(contaminationfile), val(contaminationest) path fasta - path fastaidx + path fai path dict output: diff --git a/modules/gatk4/filtermutectcalls/meta.yml b/modules/gatk4/filtermutectcalls/meta.yml index f14f9404..7d85e2b9 100644 --- a/modules/gatk4/filtermutectcalls/meta.yml +++ b/modules/gatk4/filtermutectcalls/meta.yml @@ -53,10 +53,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index dd8da406..e0e2661b 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -25,7 +25,7 @@ process GATK4_MUTECT2 { val run_mito val interval_label path fasta - path fastaidx + path fai path dict path germline_resource path germline_resource_tbi diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 4a49b07a..83f6cb7c 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -54,10 +54,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/subworkflows/nf-core/gatk_create_som_pon/main.nf b/subworkflows/nf-core/gatk_create_som_pon/main.nf index 9b190584..40269a4a 100644 --- a/subworkflows/nf-core/gatk_create_som_pon/main.nf +++ b/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -14,7 +14,7 @@ workflow GATK_CREATE_SOM_PON { take: ch_mutect2_in // channel: [ val(meta), [ input ], [ input_index ], [] ] fasta // channel: /path/to/reference/fasta - fastaidx // channel: /path/to/reference/fasta/index + fai // channel: /path/to/reference/fasta/index dict // channel: /path/to/reference/fasta/dictionary pon_name // channel: name for panel of normals interval_file // channel: /path/to/interval/file @@ -25,7 +25,7 @@ workflow GATK_CREATE_SOM_PON { // //Perform variant calling for each sample using mutect2 module in panel of normals mode. // - GATK4_MUTECT2 ( input , false , true, false , [] , fasta , fastaidx , dict , [], [] , [] , [] ) + GATK4_MUTECT2 ( input, false, true, false, [], fasta, fai, dict, [], [], [], [] ) ch_versions = ch_versions.mix(GATK4_MUTECT2.out.versions.first()) // @@ -41,7 +41,7 @@ workflow GATK_CREATE_SOM_PON { //Panel of normals made from genomicsdb workspace using createsomaticpanelofnormals. 
// GATK4_GENOMICSDBIMPORT.out.genomicsdb.view() - GATK4_CREATESOMATICPANELOFNORMALS ( GATK4_GENOMICSDBIMPORT.out.genomicsdb, fasta, fastaidx, dict ) + GATK4_CREATESOMATICPANELOFNORMALS ( GATK4_GENOMICSDBIMPORT.out.genomicsdb, fasta, fai, dict ) ch_versions = ch_versions.mix(GATK4_CREATESOMATICPANELOFNORMALS.out.versions.first()) emit: diff --git a/subworkflows/nf-core/gatk_create_som_pon/meta.yml b/subworkflows/nf-core/gatk_create_som_pon/meta.yml index bc02b885..07404aae 100644 --- a/subworkflows/nf-core/gatk_create_som_pon/meta.yml +++ b/subworkflows/nf-core/gatk_create_som_pon/meta.yml @@ -30,10 +30,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/tests/modules/gatk4/filtermutectcalls/main.nf b/tests/modules/gatk4/filtermutectcalls/main.nf index a425238b..5b2938e8 100644 --- a/tests/modules/gatk4/filtermutectcalls/main.nf +++ b/tests/modules/gatk4/filtermutectcalls/main.nf @@ -6,7 +6,7 @@ include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutect workflow test_gatk4_filtermutectcalls_base { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -18,15 +18,15 @@ workflow test_gatk4_filtermutectcalls_base { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } workflow test_gatk4_filtermutectcalls_with_files { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -38,15 +38,15 @@ workflow test_gatk4_filtermutectcalls_with_files { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } workflow test_gatk4_filtermutectcalls_use_val { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -58,8 +58,8 @@ workflow test_gatk4_filtermutectcalls_use_val { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index a3821b64..e163cf9c 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -8,8 +8,8 @@ include { GATK4_MUTECT2 as GATK4_TEMPFIX_MUTECT2 } from '../../../../modules/gat workflow test_gatk4_mutect2_tumor_normal_pair { input = [ [ id:'test'], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], ["testN"] ] run_single = false @@ -17,14 +17,14 @@ workflow test_gatk4_mutect2_tumor_normal_pair { run_mito = false interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_TEMPFIX_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_TEMPFIX_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_tumor_single { @@ -38,14 +38,14 @@ workflow test_gatk4_mutect2_tumor_single { run_mito = false interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_cram_input { @@ -59,14 +59,14 @@ workflow test_gatk4_mutect2_cram_input { run_mito = false interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_generate_pon { @@ -80,14 +80,14 @@ workflow test_gatk4_mutect2_generate_pon { run_mito = false interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = [] germline_resource_tbi = [] panel_of_normals = [] panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } // mitochondria mode would ideally have some mitochondria test data, but since the mitochondria settings only increase detection sensitivity, we can use 
the chr22 data as a stand in as it is already a small dataset, the extra variants detected compared to generate_pon shows the mode is working. @@ -102,12 +102,12 @@ workflow test_gatk4_mutect2_mitochondria { run_mito = true interval_label = 'chr22' fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = [] germline_resource_tbi = [] panel_of_normals = [] panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf index d484ac2f..42427a1f 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -16,11 +16,10 @@ workflow test_gatk_create_som_pon { [] ] ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) pon_name = "test_panel" interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) - GATK_CREATE_SOM_PON ( ch_mutect2_in, fasta, fastaidx, dict, pon_name, interval_file ) - + GATK_CREATE_SOM_PON ( ch_mutect2_in, fasta, fai, dict, pon_name, interval_file ) } From 4398056204b7c9685bc3e0888e82a315031b1f32 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 12:17:40 -0500 Subject: [PATCH 248/314] Macs2 calllpeak (#1038) * Add tests and yml file for macs2/callpeak * add format option for macs2 * update macs2/callpeak to accept format argument * update test.yml * update the container version. * try to fix the issue in conda container. * Update conda and containers * Going back to previous container versions Co-authored-by: JoseEspinosa --- modules/macs2/callpeak/main.nf | 13 ++++-- modules/macs2/callpeak/meta.yml | 63 +++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/macs2/callpeak/main.nf | 31 +++++++++++++ tests/modules/macs2/callpeak/test.yml | 38 ++++++++++++++++ 5 files changed, 146 insertions(+), 3 deletions(-) create mode 100644 modules/macs2/callpeak/meta.yml create mode 100644 tests/modules/macs2/callpeak/main.nf create mode 100644 tests/modules/macs2/callpeak/test.yml diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index d54d406d..94f8945b 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -13,9 +13,9 @@ process MACS2_CALLPEAK { conda (params.enable_conda ? 
"bioconda::macs2=2.2.7.1" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h0213d0e_1" + container "https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h4a8c8d9_3" } else { - container "quay.io/biocontainers/macs2:2.2.7.1--py38h0213d0e_1" + container "quay.io/biocontainers/macs2:2.2.7.1--py38h4a8c8d9_3" } input: @@ -33,12 +33,19 @@ process MACS2_CALLPEAK { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = options.args.tokenize() def format = meta.single_end ? 'BAM' : 'BAMPE' def control = controlbam ? "--control $controlbam" : '' + if(args.contains('--format')){ + def id = args.findIndexOf{it=='--format'} + format = args[id+1] + args.remove(id+1) + args.remove(id) + } """ macs2 \\ callpeak \\ - $options.args \\ + ${args.join(' ')} \\ --gsize $macs2_gsize \\ --format $format \\ --name $prefix \\ diff --git a/modules/macs2/callpeak/meta.yml b/modules/macs2/callpeak/meta.yml new file mode 100644 index 00000000..afb949ec --- /dev/null +++ b/modules/macs2/callpeak/meta.yml @@ -0,0 +1,63 @@ +name: macs2_callpeak +description: Peak calling of enriched genomic regions of ChIP-seq and ATAC-seq experiments +keywords: + - alignment + - atac-seq + - chip-seq + - peak-calling +tools: + - macs2: + description: Model Based Analysis for ChIP-Seq data + homepage: None + documentation: https://docs.csc.fi/apps/macs2/ + tool_dev_url: https://github.com/macs3-project/MACS + doi: "https://doi.org/10.1101/496521" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ipbam: + type: file + description: The ChIP-seq treatment file + - controlbam: + type: file + description: The control file + - macs2_gsize: + type: string + description: Effective genome size. It can be 1.0e+9 or 1000000000, or shortcuts:'hs' for human (2.7e9), + 'mm' for mouse (1.87e9), 'ce' for C. 
elegans (9e7) and 'dm' for fruitfly (1.2e8)
+
+output:
+  - versions:
+      type: file
+      description: File containing software version
+      pattern: "versions.yml"
+  - peak:
+      type: file
+      description: BED file containing annotated peaks
+      pattern: "*.{gappedPeak,narrowPeak}"
+  - xls:
+      type: file
+      description: xls file containing annotated peaks
+      pattern: "*.xls"
+  - gapped:
+      type: file
+      description: Optional BED file containing gapped peak
+      pattern: "*.gappedPeak"
+  - bed:
+      type: file
+      description: Optional BED file containing peak summits locations for every peak
+      pattern: "*.bed"
+  - bdg:
+      type: file
+      description: Optional bedGraph files for input and treatment input samples
+      pattern: "*.bdg"
+
+authors:
+  - "@ntoda03"
+  - "@JoseEspinosa"
+  - "@jianhong"
diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml
index 13ef5868..6c0b7b34 100644
--- a/tests/config/pytest_modules.yml
+++ b/tests/config/pytest_modules.yml
@@ -778,6 +778,10 @@ lofreq/indelqual:
   - modules/lofreq/indelqual/**
   - tests/modules/lofreq/indelqual/**
 
+macs2/callpeak:
+  - modules/macs2/callpeak/**
+  - tests/modules/macs2/callpeak/**
+
 malt/build:
   - modules/malt/build/**
   - tests/modules/malt/build_test/**
diff --git a/tests/modules/macs2/callpeak/main.nf b/tests/modules/macs2/callpeak/main.nf
new file mode 100644
index 00000000..db598564
--- /dev/null
+++ b/tests/modules/macs2/callpeak/main.nf
@@ -0,0 +1,31 @@
+#!/usr/bin/env nextflow
+
+nextflow.enable.dsl = 2
+
+include { MACS2_CALLPEAK } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--qval 0.1"] )
+include { MACS2_CALLPEAK as MACS2_CALLPEAK_CTRL } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--qval 0.1"] )
+include { MACS2_CALLPEAK as MACS2_CALLPEAK_BED } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--format BED --qval 1 --nomodel --extsize 200"] )
+
+workflow test_macs2_callpeak_bed {
+    input = [ [ id:'test', single_end:false ], // meta map
+              [ file( params.test_data['homo_sapiens']['pacbio']['genemodel1'], checkIfExists: true)],
+              []]
+
+    MACS2_CALLPEAK_BED ( input, 4000 )
+}
+
+workflow test_macs2_callpeak {
+    input = [ [ id:'test', single_end:false ], // meta map
+              [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ],
+              []]
+
+    MACS2_CALLPEAK ( input, 40000 )
+}
+
+workflow test_macs2_callpeak_ctrl {
+    input = [ [ id:'test', single_end:false ], // meta map
+              [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ],
+              [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ]]
+
+    MACS2_CALLPEAK_CTRL ( input, 40000 )
+}
diff --git a/tests/modules/macs2/callpeak/test.yml b/tests/modules/macs2/callpeak/test.yml
new file mode 100644
index 00000000..424a9746
--- /dev/null
+++ b/tests/modules/macs2/callpeak/test.yml
@@ -0,0 +1,38 @@
+- name: macs2 callpeak test_macs2_callpeak_bed
+  command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak_bed -c tests/config/nextflow.config
+  tags:
+    - macs2
+    - macs2/callpeak
+  files:
+    - path: output/macs2/test_peaks.narrowPeak
+      md5sum: d41d8cd98f00b204e9800998ecf8427e
+    - path: output/macs2/test_peaks.xls
+      md5sum: 762383e3a35e1f9ac3834fd6b2926092
+    - path: output/macs2/test_summits.bed
+      md5sum: d41d8cd98f00b204e9800998ecf8427e
+
+- name: macs2 callpeak test_macs2_callpeak
+  command: nextflow run
tests/modules/macs2/callpeak -entry test_macs2_callpeak -c tests/config/nextflow.config + tags: + - macs2 + - macs2/callpeak + files: + - path: output/macs2/test_peaks.narrowPeak + md5sum: 2e4da1c1704595e12aaf99cc715ad70c + - path: output/macs2/test_peaks.xls + md5sum: 5d65cb3dbd5421ea3bb5b490a100e9a4 + - path: output/macs2/test_summits.bed + md5sum: 26f0f97b6c14dbca129e947a58067c82 + +- name: macs2 callpeak test_macs2_callpeak_ctrl + command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak_ctrl -c tests/config/nextflow.config + tags: + - macs2 + - macs2/callpeak + files: + - path: output/macs2/test_peaks.narrowPeak + md5sum: 653e1108cc57ca07d0f60fc0f4fb8ba3 + - path: output/macs2/test_peaks.xls + md5sum: bf86546faa7b581b5209c29b22046a0a + - path: output/macs2/test_summits.bed + md5sum: 4f3c7c53a1d730d90d1b3dd9d3197af4 From 8d9e8ae839df0e6f0070f8615e69f3103f9f3359 Mon Sep 17 00:00:00 2001 From: avantonder Date: Mon, 15 Nov 2021 17:26:37 +0000 Subject: [PATCH 249/314] Add Medaka module (#992) * add racon * add medaka module * add medaka module * add medaka module * add medaka module * add medaka module * add medaka module * Indentation * Apply suggestions from code review Co-authored-by: FriederikeHanssen * Update main.nf * Update main.nf * Apply suggestions from code review Co-authored-by: FriederikeHanssen Co-authored-by: Harshil Patel --- modules/medaka/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/medaka/main.nf | 47 ++++++++++++++++++++ modules/medaka/meta.yml | 47 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/medaka/main.nf | 16 +++++++ tests/modules/medaka/test.yml | 7 +++ 6 files changed, 199 insertions(+) create mode 100644 modules/medaka/functions.nf create mode 100644 modules/medaka/main.nf create mode 100644 modules/medaka/meta.yml create mode 100644 tests/modules/medaka/main.nf create mode 100644 tests/modules/medaka/test.yml diff --git a/modules/medaka/functions.nf b/modules/medaka/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/medaka/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish 
versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/medaka/main.nf b/modules/medaka/main.nf new file mode 100644 index 00000000..a0db4150 --- /dev/null +++ b/modules/medaka/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MEDAKA { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::medaka=1.4.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/medaka:1.4.4--py38h130def0_0" + } else { + container "quay.io/biocontainers/medaka:1.4.4--py38h130def0_0" + } + + input: + tuple val(meta), path(reads), path(assembly) + + output: + tuple val(meta), path("*.fa.gz"), emit: assembly + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + medaka_consensus \\ + -t $task.cpus \\ + $options.args \\ + -i $reads \\ + -d $assembly \\ + -o ./ + + mv consensus.fasta ${prefix}.fa + + gzip -n ${prefix}.fa + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( medaka --version 2>&1 | sed 's/medaka //g' ) + END_VERSIONS + """ +} diff --git a/modules/medaka/meta.yml b/modules/medaka/meta.yml new file mode 100644 index 00000000..d194464f --- /dev/null +++ b/modules/medaka/meta.yml @@ -0,0 +1,47 @@ +name: medaka +description: A tool to create consensus sequences and variant calls from nanopore sequencing data +keywords: + - assembly + - polishing + - nanopore +tools: + - medaka: + description: Neural network sequence error correction. + homepage: https://nanoporetech.github.io/medaka/index.html + documentation: https://nanoporetech.github.io/medaka/index.html + tool_dev_url: https://github.com/nanoporetech/medaka + doi: "" + licence: ['Mozilla Public License 2.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - reads: + type: file + description: List of input nanopore fasta/FastQ files + pattern: "*.{fasta,fa,fastq,fastq.gz,fq,fq.gz}" + - assembly: + type: file + description: Genome assembly + pattern: "*.{fasta,fa}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - assembly: + type: file + description: Polished genome assembly + pattern: "*.fa.gz" + +authors: + - "@avantonder" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6c0b7b34..b286f114 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -822,6 +822,10 @@ maxbin2: - modules/maxbin2/** - tests/modules/maxbin2/** +medaka: + - modules/medaka/** + - tests/modules/medaka/** + megahit: - modules/megahit/** - tests/modules/megahit/** diff --git a/tests/modules/medaka/main.nf b/tests/modules/medaka/main.nf new file mode 100644 index 00000000..300e086b --- /dev/null +++ b/tests/modules/medaka/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MEDAKA } from '../../../modules/medaka/main.nf' addParams( options: [suffix:'.polished.genome'] ) + +workflow test_medaka { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] + + MEDAKA ( input ) +} diff --git a/tests/modules/medaka/test.yml b/tests/modules/medaka/test.yml new file mode 100644 index 00000000..9ce5521e --- /dev/null +++ b/tests/modules/medaka/test.yml @@ -0,0 +1,7 @@ +- name: medaka test_medaka + command: nextflow run ./tests/modules/medaka -entry test_medaka -c tests/config/nextflow.config + tags: + - medaka + files: + - path: output/medaka/test.polished.genome.fa.gz + md5sum: f42303f1d6c2c79175faeb00e10b9a6e \ No newline at end of file From 466b964b37b7241a83fff9c3d7ddc14ceada20ff Mon Sep 17 00:00:00 2001 From: mjakobs <25904555+mjakobs@users.noreply.github.com> Date: Mon, 15 Nov 2021 17:40:46 +0000 Subject: [PATCH 250/314] add Kronatools KTImportTaxonomy (#928) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * created and initialised krona module * Added kronatools/ktimporttaxonomy module * removing previous redundant work * added contains info for html * edited contains in test.yml * Update get versions Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * remove old syntax Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * rewording module description Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * added detailed keywords Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * update syntax and tool version * fixed meta.yml issues * remove contains line from test.yml * re-wrote module after nf-core/tools update - should work now * removed md5 * Update modules/kronatools/ktimporttaxonomy/main.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * update meta save * removed typo * double quotes to single quotes around html * re-ran test, which updated md5 * removed md5 * 'classifier' removed to fix linting * update version * removed erroneous ktimporttaxonomy2 * Updated input to include meta and database * fixed tab 
issues in yaml * added `contains` to test.yml * edited `contains` in test.yml * trying another `contains` * retrying `contains` * contains with extra line * removed classifier from tag * Apply suggestions from code review * Update meta.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> Co-authored-by: Sébastien Guizard Co-authored-by: Harshil Patel --- .../kronatools/ktimporttaxonomy/functions.nf | 78 +++++++++++++++++++ modules/kronatools/ktimporttaxonomy/main.nf | 39 ++++++++++ modules/kronatools/ktimporttaxonomy/meta.yml | 44 +++++++++++ tests/config/pytest_modules.yml | 4 + .../kronatools/ktimporttaxonomy/main.nf | 15 ++++ .../kronatools/ktimporttaxonomy/test.yml | 9 +++ 6 files changed, 189 insertions(+) create mode 100644 modules/kronatools/ktimporttaxonomy/functions.nf create mode 100644 modules/kronatools/ktimporttaxonomy/main.nf create mode 100644 modules/kronatools/ktimporttaxonomy/meta.yml create mode 100644 tests/modules/kronatools/ktimporttaxonomy/main.nf create mode 100644 tests/modules/kronatools/ktimporttaxonomy/test.yml diff --git a/modules/kronatools/ktimporttaxonomy/functions.nf b/modules/kronatools/ktimporttaxonomy/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/kronatools/ktimporttaxonomy/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/kronatools/ktimporttaxonomy/main.nf b/modules/kronatools/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..893bc5b2 --- /dev/null +++ b/modules/kronatools/ktimporttaxonomy/main.nf @@ -0,0 +1,39 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process KRONATOOLS_KTIMPORTTAXONOMY { + tag "${meta.id}" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::krona=2.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/krona:2.8--pl5262hdfd78af_2" + } else { + container "quay.io/biocontainers/krona:2.8--pl5262hdfd78af_2" + } + + input: + tuple val(meta), path(report) + path "taxonomy/taxonomy.tab" + + output: + tuple val(meta), path ('*.html'), emit: html + path "versions.yml" , emit: versions + + script: + def VERSION='2.8' + """ + ktImportTaxonomy "$report" -tax taxonomy + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: $VERSION + END_VERSIONS + """ +} diff --git a/modules/kronatools/ktimporttaxonomy/meta.yml b/modules/kronatools/ktimporttaxonomy/meta.yml new file mode 100644 index 00000000..f37f2db4 --- /dev/null +++ b/modules/kronatools/ktimporttaxonomy/meta.yml @@ -0,0 +1,44 @@ +name: kronatools_ktimporttaxonomy +description: KronaTools Import Taxonomy imports taxonomy classifications and produces an interactive Krona plot. +keywords: + - plot + - taxonomy + - interactive + - html + - visualisation + - krona chart +tools: + - kronatools: + description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. + homepage: https://github.com/marbl/Krona/wiki/KronaTools + documentation: http://manpages.ubuntu.com/manpages/impish/man1/ktImportTaxonomy.1.html + tool_dev_url: + doi: https://doi.org/10.1186/1471-2105-12-385 + licence: + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test'] + - database: + type: path + description: "Path to the taxonomy database downloaded by kronatools/kronadb" + - report: + type: file + description: "A tab-delimited file with taxonomy IDs and (optionally) query IDs, magnitudes, and scores. Query IDs are taken from column 1, taxonomy IDs from column 2, and scores from column 3. Lines beginning with # will be ignored." + pattern: "*.{tsv}" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - html: + type: file + description: A html file containing an interactive krona plot. 
+ pattern: "*.{html}" + +authors: + - "@mjakobs" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index b286f114..b8e5e3d3 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -718,6 +718,10 @@ kronatools/kronadb: - modules/kronatools/kronadb/** - tests/modules/kronatools/kronadb/** +kronatools/ktimporttaxonomy: + - modules/kronatools/ktimporttaxonomy/** + - tests/modules/kronatools/ktimporttaxonomy/** + last/dotplot: - modules/last/dotplot/** - tests/modules/last/dotplot/** diff --git a/tests/modules/kronatools/ktimporttaxonomy/main.nf b/tests/modules/kronatools/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..d7b08a2f --- /dev/null +++ b/tests/modules/kronatools/ktimporttaxonomy/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONATOOLS_KTIMPORTTAXONOMY } from '../../../../modules/kronatools/ktimporttaxonomy/main.nf' addParams( options: [:] ) + +workflow test_kronatools_ktimporttaxonomy { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['generic']['txt']['hello'], checkIfExists: true) ] + + taxonomy = [ file(params.test_data['generic']['txt']['hello'] , checkIfExists: true) ] + + KRONATOOLS_KTIMPORTTAXONOMY ( input, taxonomy ) +} diff --git a/tests/modules/kronatools/ktimporttaxonomy/test.yml b/tests/modules/kronatools/ktimporttaxonomy/test.yml new file mode 100644 index 00000000..15882b2e --- /dev/null +++ b/tests/modules/kronatools/ktimporttaxonomy/test.yml @@ -0,0 +1,9 @@ +- name: kronatools ktimporttaxonomy test_kronatools_ktimporttaxonomy + command: nextflow run tests/modules/kronatools/ktimporttaxonomy -entry test_kronatools_ktimporttaxonomy -c tests/config/nextflow.config + tags: + - kronatools/ktimporttaxonomy + - kronatools + files: + - path: output/kronatools/taxonomy.krona.html + contains: + - "DOCTYPE html PUBLIC" From 527ccdb4198a964d09ba43b1b33ef4de3f40cfcf Mon Sep 17 00:00:00 2001 From: avantonder Date: Mon, 15 Nov 2021 17:50:56 +0000 Subject: [PATCH 251/314] Add Miniasm module (#962) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add racon * add miniasm module * edit miniasm module * edit miniasm module * Update tests/modules/racon/main.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Update tests/modules/racon/test.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Update modules/miniasm/meta.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Update main.nf Add some spaces. 
* Update meta.yml Correct DOI * Update main.nf * Apply suggestions from code review * Update tests/modules/miniasm/test.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> Co-authored-by: Sébastien Guizard Co-authored-by: Harshil Patel --- modules/miniasm/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/miniasm/main.nf | 48 ++++++++++++++++++++ modules/miniasm/meta.yml | 51 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/miniasm/main.nf | 15 +++++++ tests/modules/miniasm/test.yml | 9 ++++ 6 files changed, 205 insertions(+) create mode 100644 modules/miniasm/functions.nf create mode 100644 modules/miniasm/main.nf create mode 100644 modules/miniasm/meta.yml create mode 100644 tests/modules/miniasm/main.nf create mode 100644 tests/modules/miniasm/test.yml diff --git a/modules/miniasm/functions.nf b/modules/miniasm/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/miniasm/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/miniasm/main.nf b/modules/miniasm/main.nf new file mode 100644 index 00000000..d2652fab --- /dev/null +++ b/modules/miniasm/main.nf @@ -0,0 +1,48 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MINIASM { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::miniasm=0.3_r179" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/miniasm:0.3_r179--h5bf99c6_2" + } else { + container "quay.io/biocontainers/miniasm:0.3_r179--h5bf99c6_2" + } + + input: + tuple val(meta), path(reads), path(paf) + + output: + tuple val(meta), path("*.gfa.gz") , emit: gfa + tuple val(meta), path("*.fasta.gz"), emit: assembly + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + miniasm \\ + $options.args \\ + -f $reads \\ + $paf > \\ + ${prefix}.gfa + + awk '/^S/{print ">"\$2"\\n"\$3}' "${prefix}.gfa" | fold > ${prefix}.fasta + + gzip -n ${prefix}.gfa + gzip -n ${prefix}.fasta + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( miniasm -V 2>&1 ) + END_VERSIONS + """ +} diff --git a/modules/miniasm/meta.yml b/modules/miniasm/meta.yml new file mode 100644 index 00000000..e8aedb9a --- /dev/null +++ b/modules/miniasm/meta.yml @@ -0,0 +1,51 @@ +name: miniasm +description: A very fast OLC-based de novo assembler for noisy long reads +keywords: + - assembly + - pacbio + - nanopore +tools: + - miniasm: + description: Ultrafast de novo assembly for long noisy reads (though having no consensus step) + homepage: https://github.com/lh3/miniasm + documentation: https://github.com/lh3/miniasm + tool_dev_url: https://github.com/lh3/miniasm + doi: "10.1093/bioinformatics/btw152" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: List of input PacBio/ONT FastQ files. + pattern: "*.{fastq,fastq.gz,fq,fq.gz}" + - paf: + type: file + description: Alignment in PAF format + pattern: "*{.paf,.paf.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - gfa: + type: file + description: Assembly graph + pattern: "*.gfa.gz" + - assembly: + type: file + description: Genome assembly + pattern: "*.fasta.gz" + +authors: + - "@avantonder" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index b8e5e3d3..d925b76d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -862,6 +862,10 @@ minia: - modules/minia/** - tests/modules/minia/** +miniasm: + - modules/miniasm/** + - tests/modules/miniasm/** + minimap2/align: - modules/minimap2/align/** - tests/modules/minimap2/align/** diff --git a/tests/modules/miniasm/main.nf b/tests/modules/miniasm/main.nf new file mode 100644 index 00000000..f3d23d56 --- /dev/null +++ b/tests/modules/miniasm/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MINIASM } from '../../../modules/miniasm/main.nf' addParams( options: [suffix:'.assembly'] ) + +workflow test_miniasm { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['bacteroides_fragilis']['nanopore']['test_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['nanopore']['overlap_paf'], checkIfExists: true) + ] + + MINIASM ( input ) +} diff --git a/tests/modules/miniasm/test.yml b/tests/modules/miniasm/test.yml new file mode 100644 index 00000000..7596a269 --- /dev/null +++ b/tests/modules/miniasm/test.yml @@ -0,0 +1,9 @@ +- name: miniasm test_miniasm + command: nextflow run tests/modules/miniasm -entry test_miniasm -c tests/config/nextflow.config + tags: + - miniasm + files: + - path: output/miniasm/test.assembly.gfa.gz + md5sum: c68e4c2b64338d1c0f5b79b32934da14 + - path: output/miniasm/test.assembly.fasta.gz + md5sum: d2f78ae618c02744e7a57bf4706ab8b4 From 2af071ed0d70ec7fa25b08e3fa48ea6fd9564a38 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Mon, 15 Nov 2021 12:18:46 -0600 Subject: [PATCH 252/314] Fix subworkflows seperate from modules (#933) * ci: Remove pytest_subworkflows * ci(bam_sort_samtools): Depend on paths-filter instead of pytest-workflow Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm Co-authored-by: Jose Espinosa-Carrasco * ci: Revert back to one job branch * ci(align_bowtie2): Run tests that depend on bam_sort_samtools * ci: Fix anchor not being created yet * ci: Update sra_fastq tags and pytest_modules * fix(bam_sort_samtools): Update nextflow.config with params * test(subworkflows): Update gatk_create_som_pon tags * ci: Point to subworkflow_hacks branch of nf-core tools Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm Co-authored-by: Jose Espinosa-Carrasco --- .github/workflows/nf-core-linting.yml | 2 +- .github/workflows/pytest-workflow.yml | 118 +----------------- .../nf-core/bam_sort_samtools/nextflow.config | 4 +- tests/config/pytest_modules.yml | 40 +++++- tests/config/pytest_subworkflows.yml | 21 ---- tests/modules/sratools/fasterqdump/test.yml | 6 +- .../nf-core/align_bowtie2/test.yml | 32 ++--- .../nf-core/bam_sort_samtools/test.yml | 24 ++-- .../nf-core/gatk_create_som_pon/test.yml | 7 +- 9 files changed, 77 insertions(+), 177 deletions(-) delete mode 100644 tests/config/pytest_subworkflows.yml diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml index 55b8c296..ce441413 100644 --- a/.github/workflows/nf-core-linting.yml +++ b/.github/workflows/nf-core-linting.yml @@ -60,7 +60,7 @@ 
jobs: # FIXME: Remove this when nf-core modules lint stabilizes and install stable release - name: Install nf-core tools development version - run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev + run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@subworkflow_hacks - name: Install Nextflow env: diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 0b509527..7cbb2689 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -6,11 +6,8 @@ on: branches: [master] jobs: - ########### - # Modules # - ########### - module_changes: - name: Check for changes in the modules + changes: + name: Check for changes runs-on: ubuntu-latest outputs: # Expose matched filters as job 'modules' output variable @@ -23,120 +20,17 @@ jobs: with: filters: "tests/config/pytest_modules.yml" - module_test: + test: runs-on: ubuntu-20.04 name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} - needs: module_changes - if: needs.module_changes.outputs.modules != '[]' + needs: changes + if: needs.changes.outputs.modules != '[]' strategy: fail-fast: false matrix: nxf_version: ["21.04.0"] - tags: ${{ fromJson(needs.module_changes.outputs.modules) }} - profile: ["docker", "singularity", "conda"] - env: - NXF_ANSI_LOG: false - steps: - - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: "3.x" - - - uses: actions/cache@v2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: "3.x" - - - name: Install Python dependencies - run: python -m pip install --upgrade pip pytest-workflow - - - uses: actions/cache@v2 - with: - path: /usr/local/bin/nextflow - key: ${{ runner.os }}-nextflow-${{ matrix.nxf_version }} - restore-keys: | - ${{ runner.os }}-nextflow- - - - name: Install Nextflow - env: - NXF_VER: ${{ matrix.nxf_version }} - CAPSULE_LOG: none - run: | - wget -qO- get.nextflow.io | bash - sudo mv nextflow /usr/local/bin/ - - - name: Set up Singularity - if: matrix.profile == 'singularity' - uses: eWaterCycle/setup-singularity@v5 - with: - singularity-version: 3.7.1 - - - name: Setup miniconda - if: matrix.profile == 'conda' - uses: conda-incubator/setup-miniconda@v2 - with: - auto-update-conda: true - channels: conda-forge,bioconda,defaults - python-version: ${{ matrix.python-version }} - - - name: Conda clean - if: matrix.profile == 'conda' - run: conda clean -a - - # Test the module - - name: Run pytest-workflow - # only use one thread for pytest-workflow to avoid race condition on conda cache. 
- run: NF_CORE_MODULES_TEST=1 TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof - - - name: Upload logs on failure - if: failure() - uses: actions/upload-artifact@v2 - with: - name: logs-${{ matrix.profile }}-${{ matrix.nxf_version }} - path: | - /home/runner/pytest_workflow_*/*/.nextflow.log - /home/runner/pytest_workflow_*/*/log.out - /home/runner/pytest_workflow_*/*/log.err - /home/runner/pytest_workflow_*/*/work - - ################ - # Subworkflows # - ################ - subworkflow_changes: - name: Check for changes in the subworkflows - runs-on: ubuntu-latest - outputs: - # Expose matched filters as job 'subworkflows' output variable - subworkflows: ${{ steps.filter.outputs.changes }} - steps: - - uses: actions/checkout@v2 - - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: "tests/config/pytest_subworkflows.yml" - - subworkflow_test: - runs-on: ubuntu-20.04 - - name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} - needs: subworkflow_changes - if: needs.subworkflow_changes.outputs.subworkflows != '[]' - strategy: - fail-fast: false - matrix: - nxf_version: ["21.04.0"] - tags: ${{ fromJson(needs.subworkflow_changes.outputs.subworkflows) }} + tags: ["${{ fromJson(needs.changes.outputs.modules) }}"] profile: ["docker", "singularity", "conda"] env: NXF_ANSI_LOG: false diff --git a/subworkflows/nf-core/bam_sort_samtools/nextflow.config b/subworkflows/nf-core/bam_sort_samtools/nextflow.config index 2fd55747..72128aad 100644 --- a/subworkflows/nf-core/bam_sort_samtools/nextflow.config +++ b/subworkflows/nf-core/bam_sort_samtools/nextflow.config @@ -1 +1,3 @@ -params.options = [:] +params.sort_options = [:] +params.index_options = [:] +params.stats_options = [:] diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d925b76d..baaee3b8 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -466,7 +466,7 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** -gatk4/createsomaticpanelofnormals: +gatk4/createsomaticpanelofnormals: &gatk4/createsomaticpanelofnormals - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** @@ -482,7 +482,7 @@ gatk4/filtermutectcalls: - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** -gatk4/genomicsdbimport: +gatk4/genomicsdbimport: &gatk4/genomicsdbimport - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** @@ -1163,7 +1163,7 @@ samtools/idxstats: - modules/samtools/idxstats/** - tests/modules/samtools/idxstats/** -samtools/index: +samtools/index: &samtools/index - modules/samtools/index/** - tests/modules/samtools/index/** @@ -1175,7 +1175,7 @@ samtools/mpileup: - modules/samtools/mpileup/** - tests/modules/samtools/mpileup/** -samtools/sort: +samtools/sort: &samtools/sort - modules/samtools/sort/** - tests/modules/samtools/sort/** @@ -1251,11 +1251,11 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/fasterqdump: +sratools/fasterqdump: &sratools/fasterqdump - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** -sratools/prefetch: +sratools/prefetch: &sratools/prefetch - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** @@ -1374,3 +1374,31 @@ yara/index: yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** + +subworkflows/bam_stats_samtools: &subworkflows/bam_stats_samtools + - 
subworkflows/nf-core/bam_stats_samtools/** + - tests/subworkflows/nf-core/bam_stats_samtools/** + +subworkflows/bam_sort_samtools: &subworkflows/bam_sort_samtools + - subworkflows/nf-core/bam_sort_samtools/** + - tests/subworkflows/nf-core/bam_sort_samtools/** + - *samtools/sort + - *samtools/index + - *subworkflows/bam_stats_samtools + +subworkflows/align_bowtie2: + - subworkflows/nf-core/align_bowtie2/** + - tests/subworkflows/nf-core/align_bowtie2/** + - *subworkflows/bam_sort_samtools + +subworkflows/sra_fastq: + - subworkflows/nf-core/sra_fastq/** + - tests/subworkflows/nf-core/sra_fastq/** + - *sratools/fasterqdump + - *sratools/prefetch + +subworkflows/gatk_create_som_pon: + - subworkflows/nf-core/gatk_create_som_pon/** + - tests/subworkflows/nf-core/gatk_create_som_pon/** + - *gatk4/genomicsdbimport + - *gatk4/createsomaticpanelofnormals diff --git a/tests/config/pytest_subworkflows.yml b/tests/config/pytest_subworkflows.yml deleted file mode 100644 index 4f9c5514..00000000 --- a/tests/config/pytest_subworkflows.yml +++ /dev/null @@ -1,21 +0,0 @@ -subworkflows/align_bowtie2: - - subworkflows/nf-core/align_bowtie2/** - - tests/subworkflows/nf-core/align_bowtie2/** - -subworkflows/bam_stats_samtools: - - subworkflows/nf-core/bam_stats_samtools/** - - tests/subworkflows/nf-core/bam_stats_samtools/** - -subworkflows/bam_sort_samtools: - - subworkflows/nf-core/bam_sort_samtools/** - - tests/subworkflows/nf-core/bam_sort_samtools/** - -subworkflows/sra_fastq: - - subworkflows/nf-core/sra_fastq/** - - tests/subworkflows/nf-core/sra_fastq/** - -subworkflows/gatk_create_som_pon: - - subworkflows/nf-core/gatk_create_som_pon/** - - tests/subworkflows/nf-core/gatk_create_som_pon/** - - \ No newline at end of file diff --git a/tests/modules/sratools/fasterqdump/test.yml b/tests/modules/sratools/fasterqdump/test.yml index 94da4ed8..7d022a0d 100644 --- a/tests/modules/sratools/fasterqdump/test.yml +++ b/tests/modules/sratools/fasterqdump/test.yml @@ -1,8 +1,7 @@ - name: sratools fasterqdump test_sratools_fasterqdump_single_end command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c tests/config/nextflow.config tags: - - sratools - - sratools/fasterqdump + - subworkflows/sra_fastq files: - path: output/sratools/SRR13255544.fastq.gz md5sum: 1054c7b71884acdb5eed8a378f18be82 @@ -12,8 +11,7 @@ - name: sratools fasterqdump test_sratools_fasterqdump_paired_end command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_paired_end -c tests/config/nextflow.config tags: - - sratools - - sratools/fasterqdump + - subworkflows/sra_fastq files: - path: output/sratools/SRR11140744_1.fastq.gz md5sum: 193809c784a4ea132ab2a253fa4f55b6 diff --git a/tests/subworkflows/nf-core/align_bowtie2/test.yml b/tests/subworkflows/nf-core/align_bowtie2/test.yml index 51261a14..116ea961 100644 --- a/tests/subworkflows/nf-core/align_bowtie2/test.yml +++ b/tests/subworkflows/nf-core/align_bowtie2/test.yml @@ -5,14 +5,14 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - - bowtie2 - - bowtie2/align - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - bowtie2 + # - bowtie2/align + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log @@ -46,14 +46,14 @@ - subworkflows/bam_sort_samtools - 
subworkflows/bam_stats_samtools # Modules - - bowtie2 - - bowtie2/align - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - bowtie2 + # - bowtie2/align + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log diff --git a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml index e2fc27d8..88ea9d5a 100644 --- a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml @@ -4,12 +4,12 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam md5sum: e4c77897d6824ce4df486d1b100618af @@ -28,12 +28,12 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam md5sum: bbb2db225f140e69a4ac577f74ccc90f diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml index a4478044..e6d80409 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -2,11 +2,10 @@ command: nextflow run ./tests/subworkflows/nf-core/gatk_create_som_pon -entry test_gatk_create_som_pon -c tests/config/nextflow.config tags: - subworkflows/gatk_create_som_pon + - gatk4 # Modules - - gatk4 - - gatk4 - - gatk4/genomicsdbimport - - gatk4/createsomaticpanelofnormals + # - gatk4/genomicsdbimport + # - gatk4/createsomaticpanelofnormals files: # gatk4 mutect2 - path: output/gatk4/test1.vcf.gz From ad460103851f353a373ed6a3064cb27ba1bc622e Mon Sep 17 00:00:00 2001 From: SusiJo <43847534+SusiJo@users.noreply.github.com> Date: Mon, 15 Nov 2021 19:29:55 +0100 Subject: [PATCH 253/314] Added new module csvtk/split (#1014) * added module csvtk/split * removed todo statement * adjusted meta map names * changed tests to use generic input files * added module in pytest * updated test-data paths * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/csvtk/split/functions.nf | 78 ++++++++++++++++++++++++++++++ modules/csvtk/split/main.nf | 50 +++++++++++++++++++ modules/csvtk/split/meta.yml | 52 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 10 +++- tests/modules/csvtk/split/main.nf | 27 +++++++++++ tests/modules/csvtk/split/test.yml | 25 ++++++++++ 7 files changed, 244 insertions(+), 2 deletions(-) create mode 100644 modules/csvtk/split/functions.nf create mode 100644 modules/csvtk/split/main.nf create mode 100644 modules/csvtk/split/meta.yml create mode 100644 tests/modules/csvtk/split/main.nf create mode 100644 tests/modules/csvtk/split/test.yml diff --git a/modules/csvtk/split/functions.nf b/modules/csvtk/split/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ 
b/modules/csvtk/split/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/csvtk/split/main.nf b/modules/csvtk/split/main.nf new file mode 100644 index 00000000..727e046a --- /dev/null +++ b/modules/csvtk/split/main.nf @@ -0,0 +1,50 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CSVTK_SPLIT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::csvtk=0.23.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0" + } else { + container "quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0" + } + + input: + tuple val(meta), path(csv) + val in_format + val out_format + + output: + tuple val(meta), path("*.${out_extension}"), emit: split_csv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def delimiter = in_format == "tsv" ? "--tabs" : (in_format == "csv" ? "--delimiter ',' " : in_format) + def out_delimiter = out_format == "tsv" ? "--out-tabs" : (out_format == "csv" ? "--out-delimiter ',' " : out_format) + out_extension = out_format == "tsv" ? 'tsv' : 'csv' + """ + sed -i.bak '/^##/d' $csv + csvtk \\ + split \\ + $options.args \\ + --num-cpus $task.cpus \\ + $delimiter \\ + $out_delimiter \\ + $csv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$( csvtk version | sed -e 's/csvtk v//g' )) + END_VERSIONS + """ +} diff --git a/modules/csvtk/split/meta.yml b/modules/csvtk/split/meta.yml new file mode 100644 index 00000000..45b71d14 --- /dev/null +++ b/modules/csvtk/split/meta.yml @@ -0,0 +1,52 @@ +name: csvtk_split +description: Splits CSV/TSV into multiple files according to column values +keywords: + - split + - csv + - tsv +tools: + - csvtk: + description: + CSVTK is a cross-platform, efficient and practical CSV/TSV toolkit + that allows rapid data investigation and manipulation. + homepage: https://bioinf.shenwei.me/csvtk/ + documentation: https://bioinf.shenwei.me/csvtk/ + tool_dev_url: https://github.com/shenwei356/csvtk + doi: "" + licence: ['MIT'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - csv: + type: file + description: CSV/TSV file + pattern: "*.{csv,tsv}" + - in_format: + type: string + description: Input format (csv, tab, or a delimiting character) + pattern: "*" + - out_format: + type: string + description: Output format (csv, tab, or a delimiting character) + pattern: "*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - split_csv: + type: file + description: Split CSV/TSV file + pattern: "*.{csv,tsv}" + +authors: + - "@SusiJo" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index baaee3b8..acf36372 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -310,6 +310,10 @@ csvtk/concat: - modules/csvtk/concat/** - tests/modules/csvtk/concat/** +csvtk/split: + - modules/csvtk/split/** + - tests/modules/csvtk/split/** + custom/dumpsoftwareversions: - modules/custom/dumpsoftwareversions/** - tests/modules/custom/dumpsoftwareversions/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 46232ef9..6ac4472c 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -34,7 +34,7 @@ params { contigs_genome_maf_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.maf.gz" contigs_genome_par = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.par" lastdb_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/lastdb.tar.gz" - + baits_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/baits.interval_list" targets_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/targets.interval_list" } @@ -249,11 +249,17 @@ params { } } 'generic' { + 'csv' { + test_csv = "${test_data_dir}/generic/csv/test.csv" + } 'notebooks' { rmarkdown = "${test_data_dir}/generic/notebooks/rmarkdown/rmarkdown_notebook.Rmd" ipython_md = "${test_data_dir}/generic/notebooks/jupyter/ipython_notebook.md" ipython_ipynb = "${test_data_dir}/generic/notebooks/jupyter/ipython_notebook.ipynb" } + 'tsv' { + test_tsv = "${test_data_dir}/generic/tsv/test.tsv" + } 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } @@ -285,6 +291,6 @@ params { test_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" overlap_paf = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/overlap.paf" } - } + } } } diff --git a/tests/modules/csvtk/split/main.nf b/tests/modules/csvtk/split/main.nf new file mode 100644 index 00000000..8dfd4053 --- /dev/null +++ b/tests/modules/csvtk/split/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CSVTK_SPLIT } from '../../../../modules/csvtk/split/main.nf' addParams( options: [args: "-C '&' --fields 'first_name' "]) + +workflow test_csvtk_split_tsv { + + input = [ + [ id:'test' ], // meta map + [ file(params.test_data['generic']['tsv']['test_tsv'], checkIfExists: true) ] + ] + in_format = "tsv" + out_format = "tsv" + CSVTK_SPLIT ( input, in_format, out_format ) +} + +workflow test_csvtk_split_csv { + + input = [ + [ id:'test' ], // meta map + [ file(params.test_data['generic']['csv']['test_csv'], checkIfExists: true) ] + ] + in_format = "csv" + out_format = "csv" + CSVTK_SPLIT( input, in_format, out_format ) +} diff --git a/tests/modules/csvtk/split/test.yml b/tests/modules/csvtk/split/test.yml new file mode 100644 index 00000000..ade2fe48 --- /dev/null +++ b/tests/modules/csvtk/split/test.yml @@ -0,0 +1,25 @@ +- name: csvtk split test_csvtk_split_tsv + command: nextflow run tests/modules/csvtk/split -entry test_csvtk_split_tsv -c tests/config/nextflow.config + tags: + - csvtk/split + - csvtk + files: + - path: output/csvtk/test-Ken.tsv + md5sum: 589a2add7f0b8e998d4959e5d883e7d5 + - path: output/csvtk/test-Rob.tsv + md5sum: 
6c5555d689c4e685d35d6e394ad6e1e6 + - path: output/csvtk/test-Robert.tsv + md5sum: 45ae6da8111096746d1736d34220a3ec + +- name: csvtk split test_csvtk_split_csv + command: nextflow run tests/modules/csvtk/split -entry test_csvtk_split_csv -c tests/config/nextflow.config + tags: + - csvtk/split + - csvtk + files: + - path: output/csvtk/test-Ken.csv + md5sum: 71a931dae6f15f5ddb0318c7d4afe81e + - path: output/csvtk/test-Rob.csv + md5sum: efc4bc507021043a3bf2fb0724c4a216 + - path: output/csvtk/test-Robert.csv + md5sum: 8de2f076e64252c2abed69b9c2a3a386 From 2294ff7826eb8f49b006e5428328638473cdd028 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Mon, 15 Nov 2021 11:32:53 -0700 Subject: [PATCH 254/314] add ncbi-genome-download module (#980) * add ncbi-genome-download module * Update modules/ncbigenomedownload/main.nf Co-authored-by: Gregor Sturm Co-authored-by: Harshil Patel --- modules/ncbigenomedownload/functions.nf | 78 +++++++++++++++++++ modules/ncbigenomedownload/main.nf | 56 ++++++++++++++ modules/ncbigenomedownload/meta.yml | 91 +++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/modules/ncbigenomedownload/main.nf | 16 ++++ tests/modules/ncbigenomedownload/test.yml | 11 +++ 6 files changed, 256 insertions(+) create mode 100644 modules/ncbigenomedownload/functions.nf create mode 100644 modules/ncbigenomedownload/main.nf create mode 100644 modules/ncbigenomedownload/meta.yml create mode 100644 tests/modules/ncbigenomedownload/main.nf create mode 100644 tests/modules/ncbigenomedownload/test.yml diff --git a/modules/ncbigenomedownload/functions.nf b/modules/ncbigenomedownload/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ncbigenomedownload/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ncbigenomedownload/main.nf b/modules/ncbigenomedownload/main.nf new file mode 100644 index 00000000..ffa53871 --- /dev/null +++ b/modules/ncbigenomedownload/main.nf @@ -0,0 +1,56 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process NCBIGENOMEDOWNLOAD { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::ncbi-genome-download=0.3.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ncbi-genome-download:0.3.0--pyh864c0ab_1" + } else { + container "quay.io/biocontainers/ncbi-genome-download:0.3.0--pyh864c0ab_1" + } + + input: + val meta + path accessions + + output: + tuple val(meta), path("*_genomic.gbff.gz") , emit: gbk , optional: true + tuple val(meta), path("*_genomic.fna.gz") , emit: fna , optional: true + tuple val(meta), path("*_rm.out.gz") , emit: rm , optional: true + tuple val(meta), path("*_feature_table.txt.gz") , emit: features, optional: true + tuple val(meta), path("*_genomic.gff.gz") , emit: gff , optional: true + tuple val(meta), path("*_protein.faa.gz") , emit: faa , optional: true + tuple val(meta), path("*_protein.gpff.gz") , emit: gpff , optional: true + tuple val(meta), path("*_wgsmaster.gbff.gz") , emit: wgs_gbk , optional: true + tuple val(meta), path("*_cds_from_genomic.fna.gz"), emit: cds , optional: true + tuple val(meta), path("*_rna.fna.gz") , emit: rna , optional: true + tuple val(meta), path("*_rna_from_genomic.fna.gz"), emit: rna_fna , optional: true + tuple val(meta), path("*_assembly_report.txt") , emit: report , optional: true + tuple val(meta), path("*_assembly_stats.txt") , emit: stats , optional: true + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def accessions_opt = accessions ? 
"-A ${accessions}" : "" + """ + ncbi-genome-download \\ + $options.args \\ + $accessions_opt \\ + --output-folder ./ \\ + --flat-output + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( ncbi-genome-download --version ) + END_VERSIONS + """ +} diff --git a/modules/ncbigenomedownload/meta.yml b/modules/ncbigenomedownload/meta.yml new file mode 100644 index 00000000..fd9e0a45 --- /dev/null +++ b/modules/ncbigenomedownload/meta.yml @@ -0,0 +1,91 @@ +name: ncbigenomedownload +description: A tool to quickly download assemblies from NCBI's Assembly database +keywords: + - fasta + - download + - assembly +tools: + - ncbigenomedownload: + description: Download genome files from the NCBI FTP server. + homepage: https://github.com/kblin/ncbi-genome-download + documentation: https://github.com/kblin/ncbi-genome-download + tool_dev_url: https://github.com/kblin/ncbi-genome-download + doi: "" + licence: ['Apache Software License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - accessions: + type: file + description: List of accessions (one per line) to download + pattern: "*.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - gbk: + type: file + description: GenBank format of the genomic sequence(s) in the assembly + pattern: "*_genomic.gbff.gz" + - fna: + type: file + description: FASTA format of the genomic sequence(s) in the assembly. + pattern: "*_genomic.fna.gz" + - rm: + type: file + description: RepeatMasker output for eukaryotes. + pattern: "*_rm.out.gz" + - features: + type: file + description: Tab-delimited text file reporting locations and attributes for a subset of annotated features + pattern: "*_feature_table.txt.gz" + - gff: + type: file + description: Annotation of the genomic sequence(s) in GFF3 format + pattern: "*_genomic.gff.gz" + - faa: + type: file + description: FASTA format of the accessioned protein products annotated on the genome assembly. + pattern: "*_protein.faa.gz" + - gpff: + type: file + description: GenPept format of the accessioned protein products annotated on the genome assembly. 
+ pattern: "*_protein.gpff.gz" + - wgs_gbk: + type: file + description: GenBank flat file format of the WGS master for the assembly + pattern: "*_wgsmaster.gbff.gz" + - cds: + type: file + description: FASTA format of the nucleotide sequences corresponding to all CDS features annotated on the assembly + pattern: "*_cds_from_genomic.fna.gz" + - rna: + type: file + description: FASTA format of accessioned RNA products annotated on the genome assembly + pattern: "*_rna.fna.gz" + - rna_fna: + type: file + description: FASTA format of the nucleotide sequences corresponding to all RNA features annotated on the assembly + pattern: "*_rna_from_genomic.fna.gz" + - report: + type: file + description: Tab-delimited text file reporting the name, role and sequence accession.version for objects in the assembly + pattern: "*_assembly_report.txt" + - stats: + type: file + description: Tab-delimited text file reporting statistics for the assembly + pattern: "*_assembly_stats.txt" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index acf36372..293e333a 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -919,6 +919,10 @@ nanoplot: - modules/nanoplot/** - tests/modules/nanoplot/** +ncbigenomedownload: + - modules/ncbigenomedownload/** + - tests/modules/ncbigenomedownload/** + nextclade: - modules/nextclade/** - tests/modules/nextclade/** diff --git a/tests/modules/ncbigenomedownload/main.nf b/tests/modules/ncbigenomedownload/main.nf new file mode 100644 index 00000000..f729b91d --- /dev/null +++ b/tests/modules/ncbigenomedownload/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NCBIGENOMEDOWNLOAD } from '../../../modules/ncbigenomedownload/main.nf' addParams( options: [ args: '-A GCF_000013425.1 --formats genbank,fasta,assembly-stats bacteria '] ) + +workflow test_ncbigenomedownload { + + input = [ [ id:'test', single_end:false ] ] + + accessions = [] + + NCBIGENOMEDOWNLOAD ( input, accessions) +} + + diff --git a/tests/modules/ncbigenomedownload/test.yml b/tests/modules/ncbigenomedownload/test.yml new file mode 100644 index 00000000..7d1f7c74 --- /dev/null +++ b/tests/modules/ncbigenomedownload/test.yml @@ -0,0 +1,11 @@ +- name: ncbigenomedownload test_ncbigenomedownload + command: nextflow run tests/modules/ncbigenomedownload -entry test_ncbigenomedownload -c tests/config/nextflow.config + tags: + - ncbigenomedownload + files: + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_assembly_stats.txt + md5sum: f78c6a373130e50fac5472962a5fdf44 + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_genomic.fna.gz + md5sum: b086eb1020e7df022afa545dc6d93297 + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_genomic.gbff.gz + md5sum: ae2da70e32c783858e6c60c72e9eeb7a From 13b8a16f4a6945af9df146b67972eb70b52e9844 Mon Sep 17 00:00:00 2001 From: tamara-hodgetts <88095902+tamara-hodgetts@users.noreply.github.com> Date: Mon, 15 Nov 2021 19:22:12 +0000 Subject: [PATCH 255/314] Add module get_chrom_sizes (#1063) * hifiasm copied from fastqc * hifiasm tests init from fastqc * meta.yml init; test.yml and main.nf for printing version * Add hifiasm version printing * Removed spaced on an empty line * Reverted hifiasm from main * init getchromsizes * add tests for getchromsizes * Included meta.yml * removed whitespace * Moved getchromsizes to custom folder * Update modules/custom/getchromsizes/main.nf Co-authored-by: Harshil Patel Co-authored-by: Sviatoslav Sidorov 
Co-authored-by: Svyatoslav Sidorov Co-authored-by: Chris Cheshire Co-authored-by: Tamara Hodgetts Co-authored-by: Harshil Patel --- modules/custom/getchromsizes/functions.nf | 78 +++++++++++++++++++++ modules/custom/getchromsizes/main.nf | 39 +++++++++++ modules/custom/getchromsizes/meta.yml | 39 +++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/custom/getchromsizes/main.nf | 12 ++++ tests/modules/custom/getchromsizes/test.yml | 10 +++ 6 files changed, 182 insertions(+) create mode 100644 modules/custom/getchromsizes/functions.nf create mode 100644 modules/custom/getchromsizes/main.nf create mode 100644 modules/custom/getchromsizes/meta.yml create mode 100644 tests/modules/custom/getchromsizes/main.nf create mode 100644 tests/modules/custom/getchromsizes/test.yml diff --git a/modules/custom/getchromsizes/functions.nf b/modules/custom/getchromsizes/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/custom/getchromsizes/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/custom/getchromsizes/main.nf b/modules/custom/getchromsizes/main.nf new file mode 100644 index 00000000..fb46986b --- /dev/null +++ b/modules/custom/getchromsizes/main.nf @@ -0,0 +1,39 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CUSTOM_GETCHROMSIZES { + tag "$fasta" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" + } else { + container "quay.io/biocontainers/samtools:1.14--hb421002_0" + } + + input: + path fasta + + output: + path '*.sizes' , emit: sizes + path '*.fai' , emit: fai + path "versions.yml", emit: versions + + script: + """ + samtools faidx $fasta + cut -f 1,2 ${fasta}.fai > ${fasta}.sizes + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/custom/getchromsizes/meta.yml b/modules/custom/getchromsizes/meta.yml new file mode 100644 index 00000000..eb1db4bb --- /dev/null +++ b/modules/custom/getchromsizes/meta.yml @@ -0,0 +1,39 @@ +name: custom_getchromsizes +description: Generates a FASTA file of chromosome sizes and a fasta index file +keywords: + - fasta + - chromosome + - indexing +tools: + - samtools: + description: Tools for dealing with SAM, BAM and CRAM files + homepage: http://www.htslib.org/ + documentation: http://www.htslib.org/doc/samtools.html + tool_dev_url: https://github.com/samtools/samtools + doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] + +input: + - fasta: + type: file + description: FASTA file + pattern: "*.{fasta}" + +output: + - sizes: + type: file + description: File containing chromosome lengths + pattern: "*.{sizes}" + - fai: + type: file + description: FASTA index file + pattern: "*.{fai}" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + + +authors: + - "@tamara-hodgetts" + - "@chris-cheshire" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 293e333a..994b6947 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -318,6 +318,10 @@ custom/dumpsoftwareversions: - modules/custom/dumpsoftwareversions/** - tests/modules/custom/dumpsoftwareversions/** +custom/getchromsizes: + - modules/custom/getchromsizes/** + - tests/modules/custom/getchromsizes/** + cutadapt: - modules/cutadapt/** - tests/modules/cutadapt/** diff --git a/tests/modules/custom/getchromsizes/main.nf b/tests/modules/custom/getchromsizes/main.nf new file mode 100644 index 
00000000..503668ec --- /dev/null +++ b/tests/modules/custom/getchromsizes/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CUSTOM_GETCHROMSIZES } from '../../../../modules/custom/getchromsizes/main.nf' addParams( options: [:] ) + +workflow test_custom_getchromsizes { + + input = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + CUSTOM_GETCHROMSIZES ( input ) +} diff --git a/tests/modules/custom/getchromsizes/test.yml b/tests/modules/custom/getchromsizes/test.yml new file mode 100644 index 00000000..1265f478 --- /dev/null +++ b/tests/modules/custom/getchromsizes/test.yml @@ -0,0 +1,10 @@ +- name: custom getchromsizes + command: nextflow run ./tests/modules/custom/getchromsizes -entry test_custom_getchromsizes -c tests/config/nextflow.config + tags: + - custom + - custom/getchromsizes + files: + - path: output/custom/genome.fasta.fai + md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5 + - path: output/custom/genome.fasta.sizes + md5sum: a57c401f27ae5133823fb09fb21c8a3c From c2bba7a65d04fff9a908b2b20eaa89e4aff69078 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Mon, 15 Nov 2021 12:43:53 -0700 Subject: [PATCH 256/314] add clonalframeml module (#974) * add clonalframeml module * Update main.nf * try recommended gzip * Update main.nf Co-authored-by: Chris Cheshire --- modules/clonalframeml/functions.nf | 78 ++++++++++++++++++++++++++++ modules/clonalframeml/main.nf | 47 +++++++++++++++++ modules/clonalframeml/meta.yml | 67 ++++++++++++++++++++++++ tests/config/pytest_modules.yml | 6 ++- tests/modules/clonalframeml/main.nf | 14 +++++ tests/modules/clonalframeml/test.yml | 15 ++++++ 6 files changed, 226 insertions(+), 1 deletion(-) create mode 100644 modules/clonalframeml/functions.nf create mode 100644 modules/clonalframeml/main.nf create mode 100644 modules/clonalframeml/meta.yml create mode 100644 tests/modules/clonalframeml/main.nf create mode 100644 tests/modules/clonalframeml/test.yml diff --git a/modules/clonalframeml/functions.nf b/modules/clonalframeml/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/clonalframeml/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + 
def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/clonalframeml/main.nf b/modules/clonalframeml/main.nf new file mode 100644 index 00000000..f99f944b --- /dev/null +++ b/modules/clonalframeml/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CLONALFRAMEML { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::clonalframeml=1.12" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/clonalframeml:1.12--h7d875b9_1" + } else { + container "quay.io/biocontainers/clonalframeml:1.12--h7d875b9_1" + } + + input: + tuple val(meta), path(newick), path(msa) + + output: + tuple val(meta), path("*.emsim.txt") , emit: emsim, optional: true + tuple val(meta), path("*.em.txt") , emit: em + tuple val(meta), path("*.importation_status.txt") , emit: status + tuple val(meta), path("*.labelled_tree.newick") , emit: newick + tuple val(meta), path("*.ML_sequence.fasta") , emit: fasta + tuple val(meta), path("*.position_cross_reference.txt"), emit: pos_ref + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + ClonalFrameML \\ + $newick \\ + <(gzip -cdf $msa) \\ + $prefix \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(ClonalFrameML -version 2>&1) | sed 's/^.*ClonalFrameML v//' ) + END_VERSIONS + """ +} diff --git a/modules/clonalframeml/meta.yml b/modules/clonalframeml/meta.yml new file mode 100644 index 00000000..874a04be --- /dev/null +++ b/modules/clonalframeml/meta.yml @@ -0,0 +1,67 @@ +name: clonalframeml +description: Predict recomination events in bacterial genomes +keywords: + - fasta + - multiple sequence alignment + - recombination +tools: + - clonalframeml: + description: Efficient inferencing of recombination in bacterial genomes + homepage: https://github.com/xavierdidelot/ClonalFrameML + documentation: https://github.com/xavierdidelot/clonalframeml/wiki + tool_dev_url: https://github.com/xavierdidelot/ClonalFrameML + doi: "10.1371/journal.pcbi.1004041" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - msa: + type: file + description: A multiple seqeunce alignmnet in FASTA format + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - newick: + type: file + description: A Newick formated tree based on multiple sequence alignment + pattern: "*.{newick,treefile,dnd}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - emsim: + type: file + description: Bootstrapped values for the three parameters R/theta, nu and delta + pattern: "*.emsim.txt" + - em: + type: file + description: Point estimates for R/theta, nu, delta and the branch lengths + pattern: "*.em.txt" + - fasta: + type: file + description: Sequence reconstructed by maximum likelihood + pattern: "*.ML_sequence.fasta" + - newick: + type: file + description: Tree with all nodes labelled + pattern: "*.labelled_tree.newick" + - pos_ref: + type: file + description: CSV mapping input sequence files to the sequences in the *.ML_sequence.fasta + pattern: "*.position_cross_reference.txt" + - status: + type: file + description: List of reconstructed recombination events + pattern: "*.importation_status.txt" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 994b6947..7db586b5 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -282,6 +282,10 @@ chromap/index: - modules/chromap/index/** - tests/modules/chromap/index/** +clonalframeml: + - modules/clonalframeml/** + - tests/modules/clonalframeml/** + cmseq/polymut: - modules/cmseq/polymut/** - tests/modules/cmseq/polymut/** @@ -1210,7 +1214,7 @@ seacr/callpeak: seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** - + seqsero2: - modules/seqsero2/** - tests/modules/seqsero2/** diff --git a/tests/modules/clonalframeml/main.nf b/tests/modules/clonalframeml/main.nf new file mode 100644 index 00000000..35ecaa79 --- /dev/null +++ b/tests/modules/clonalframeml/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CLONALFRAMEML } from '../../../modules/clonalframeml/main.nf' addParams( options: [:] ) + +workflow test_clonalframeml { + + input = [ [ id:'test' ], // meta map + 
file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/genome_msa.newick", checkIfExists: true), + file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/genome_msa.fa.gz", checkIfExists: true),] + + CLONALFRAMEML ( input ) +} diff --git a/tests/modules/clonalframeml/test.yml b/tests/modules/clonalframeml/test.yml new file mode 100644 index 00000000..f2b68115 --- /dev/null +++ b/tests/modules/clonalframeml/test.yml @@ -0,0 +1,15 @@ +- name: clonalframeml test_clonalframeml + command: nextflow run tests/modules/clonalframeml -entry test_clonalframeml -c tests/config/nextflow.config + tags: + - clonalframeml + files: + - path: output/clonalframeml/test.ML_sequence.fasta + md5sum: 1b75cdaea78f5920ebb92125422a2589 + - path: output/clonalframeml/test.em.txt + md5sum: 5439d59897a9a90390bb175207bf2b9b + - path: output/clonalframeml/test.importation_status.txt + md5sum: 6ce9dbc7746b1c884af042fa02311fba + - path: output/clonalframeml/test.labelled_tree.newick + md5sum: aa47754eea8a3b6bab56bd7c83ba78db + - path: output/clonalframeml/test.position_cross_reference.txt + md5sum: 8ff60768b348fc6f7a1e787aca72f596 From 52c541b080e8a4735a2ef5d78026d73f6f338624 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 16:43:55 -0500 Subject: [PATCH 257/314] Cooler cload (#634) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * rebuild cooler cload. * update test file path of cload. * add pytest for cload * update to version.yml * update the test data path * Update tests/modules/cooler/cload/main.nf Co-authored-by: Sébastien Guizard * Update modules/cooler/cload/main.nf Co-authored-by: Sébastien Guizard * Update tests/config/test_data.config Co-authored-by: Sébastien Guizard * Update main.nf Remove a lonely curly bracket. * Update test.yml Updated with new workflows. * update the test files * merge the conflicts. 
* update the test.yml * update for change of cooler/dump Co-authored-by: Gregor Sturm Co-authored-by: Sébastien Guizard Co-authored-by: Harshil Patel --- modules/cooler/cload/functions.nf | 78 +++++++++++++++++++++++++++++ modules/cooler/cload/main.nf | 47 +++++++++++++++++ modules/cooler/cload/meta.yml | 52 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 8 ++- tests/modules/cooler/cload/main.nf | 52 +++++++++++++++++++ tests/modules/cooler/cload/test.yml | 29 +++++++++++ 7 files changed, 269 insertions(+), 1 deletion(-) create mode 100644 modules/cooler/cload/functions.nf create mode 100644 modules/cooler/cload/main.nf create mode 100644 modules/cooler/cload/meta.yml create mode 100644 tests/modules/cooler/cload/main.nf create mode 100644 tests/modules/cooler/cload/test.yml diff --git a/modules/cooler/cload/functions.nf b/modules/cooler/cload/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cooler/cload/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cooler/cload/main.nf b/modules/cooler/cload/main.nf new file mode 100644 index 00000000..ec0cad56 --- /dev/null +++ b/modules/cooler/cload/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process COOLER_CLOAD { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" + } else { + container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" + } + + input: + tuple val(meta), path(pairs), path(index) + val cool_bin + path chromsizes + + output: + tuple val(meta), val(cool_bin), path("*.cool"), emit: cool + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def nproc = options.args.contains('pairix') || options.args.contains('tabix')? "--nproc ${task.cpus}" : '' + + """ + cooler cload \\ + $options.args \\ + $nproc \\ + ${chromsizes}:${cool_bin} \\ + $pairs \\ + ${prefix}.${cool_bin}.cool + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/cload/meta.yml b/modules/cooler/cload/meta.yml new file mode 100644 index 00000000..8ac75911 --- /dev/null +++ b/modules/cooler/cload/meta.yml @@ -0,0 +1,52 @@ +name: cooler_cload +description: Create a cooler from genomic pairs and bins +keywords: + - cool +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - pairs: + type: file + description: Path to contacts (i.e. read pairs) file. + - index: + type: file + description: Path to index file of the contacts. + - cool_bin: + type: value + description: Bins size in bp + - chromsizes: + type: file + description: Path to a chromsizes file. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - cool: + type: file + description: Output COOL file path + pattern: "*.cool" + - cool_bin: + type: value + description: Bins size in bp + +authors: + - "@jianhong" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7db586b5..9d6be566 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -298,6 +298,10 @@ cooler/digest: - modules/cooler/digest/** - tests/modules/cooler/digest/** +cooler/cload: + - modules/cooler/cload/** + - tests/modules/cooler/cload/** + cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 6ac4472c..3351204d 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -263,7 +263,13 @@ params { 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } - 'cooler' { + 'cooler'{ + test_pairix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz" + test_pairix_pair_gz_px2 = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz.px2" + test_pairs_pair = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.sample1.pairs" + test_tabix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.sorted.possrt.txt.gz" + test_tabix_pair_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.sorted.possrt.txt.gz.tbi" + hg19_chrom_sizes = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.chrom.sizes" test_merge_cool = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cool" test_merge_cool_cp2 = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cp2.cool" diff --git a/tests/modules/cooler/cload/main.nf b/tests/modules/cooler/cload/main.nf new file mode 100644 index 00000000..dd9b3e98 --- /dev/null +++ b/tests/modules/cooler/cload/main.nf @@ -0,0 +1,52 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_CLOAD } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'pairix'] ) +include { COOLER_CLOAD as COOLER_CLOAD_PAIRS } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'pairs --chrom1 1 --pos1 2 --chrom2 4 --pos2 5 -N'] ) +include { COOLER_CLOAD as COOLER_CLOAD_TABIX } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'tabix'] ) +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_DUMP as COOLER_DUMP_PAIRS} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_DUMP as COOLER_DUMP_TABIX} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) + +workflow test_cooler_cload_pairix { + + input = [ [ id:'test_pairix', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_pairix_pair_gz'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_pairix_pair_gz_px2'], checkIfExists: true)] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD ( input, bin_size, sizes ) + COOLER_DUMP(COOLER_CLOAD.out.cool.map{[it[0], it[2]]}, []) + +} + +workflow test_cooler_cload_pairs { + + input = 
[ [ id:'test_pairs', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_pairs_pair'], checkIfExists: true), + []] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD_PAIRS ( input, bin_size, sizes ) + COOLER_DUMP_PAIRS(COOLER_CLOAD_PAIRS.out.cool.map{[it[0], it[2]]}, []) + +} + +workflow test_cooler_cload_tabix { + + input = [ [ id:'test_tabix', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_tabix_pair_gz'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_tabix_pair_gz_tbi'], checkIfExists: true)] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD_TABIX ( input, bin_size, sizes ) + COOLER_DUMP_TABIX(COOLER_CLOAD_TABIX.out.cool.map{[it[0], it[2]]}, []) + +} diff --git a/tests/modules/cooler/cload/test.yml b/tests/modules/cooler/cload/test.yml new file mode 100644 index 00000000..7cb9a0bd --- /dev/null +++ b/tests/modules/cooler/cload/test.yml @@ -0,0 +1,29 @@ +- name: cooler cload test_cooler_cload_pairix + command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_pairix -c tests/config/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_pairix.2000000.cool + - path: output/cooler/test_pairix.bedpe + md5sum: 0cd85311089669688ec17468eae02111 + +- name: cooler cload test_cooler_cload_pairs + command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_pairs -c tests/config/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_pairs.2000000.cool + - path: output/cooler/test_pairs.bedpe + md5sum: 7f832733fc7853ebb1937b33e4c1e0de + +- name: cooler cload test_cooler_cload_tabix + command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_tabix -c tests/config/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_tabix.2000000.cool + - path: output/cooler/test_tabix.bedpe + md5sum: 0cd85311089669688ec17468eae02111 From d5f69856072cf366bc1e023c9f89bc6e738e4904 Mon Sep 17 00:00:00 2001 From: Sateesh <33637490+sateeshperi@users.noreply.github.com> Date: Mon, 15 Nov 2021 17:05:34 -0500 Subject: [PATCH 258/314] add new nucmer module (#945) * add new nucmer module * Apply suggestions from code review Co-authored-by: Robert A. Petit III * update tests with file produced by input * Update main.nf * Update meta.yml Co-authored-by: Michael Cipriano Co-authored-by: Robert A. 
Petit III Co-authored-by: Harshil Patel --- modules/nucmer/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/nucmer/main.nf | 55 +++++++++++++++++++++++ modules/nucmer/meta.yml | 50 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/nucmer/main.nf | 14 ++++++ tests/modules/nucmer/test.yml | 9 ++++ 6 files changed, 210 insertions(+) create mode 100644 modules/nucmer/functions.nf create mode 100644 modules/nucmer/main.nf create mode 100644 modules/nucmer/meta.yml create mode 100644 tests/modules/nucmer/main.nf create mode 100644 tests/modules/nucmer/test.yml diff --git a/modules/nucmer/functions.nf b/modules/nucmer/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/nucmer/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/nucmer/main.nf b/modules/nucmer/main.nf new file mode 100644 index 00000000..49a275f4 --- /dev/null +++ b/modules/nucmer/main.nf @@ -0,0 +1,55 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process NUCMER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::mummer=3.23" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12" + } else { + container "quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12" + } + + input: + tuple val(meta), path(ref), path(query) + + output: + tuple val(meta), path("*.delta") , emit: delta + tuple val(meta), path("*.coords"), emit: coords + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false + def is_compressed_query = query.getName().endsWith(".gz") ? true : false + def fasta_name_ref = ref.getName().replace(".gz", "") + def fasta_name_query = query.getName().replace(".gz", "") + """ + if [ "$is_compressed_ref" == "true" ]; then + gzip -c -d $ref > $fasta_name_ref + fi + if [ "$is_compressed_query" == "true" ]; then + gzip -c -d $query > $fasta_name_query + fi + + nucmer \\ + -p $prefix \\ + --coords \\ + $options.args \\ + $fasta_name_ref \\ + $fasta_name_query + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( nucmer --version 2>&1 | grep "version" | sed -e "s/NUCmer (NUCleotide MUMmer) version //g; s/nucmer//g;" ) + END_VERSIONS + """ +} diff --git a/modules/nucmer/meta.yml b/modules/nucmer/meta.yml new file mode 100644 index 00000000..cccf723f --- /dev/null +++ b/modules/nucmer/meta.yml @@ -0,0 +1,50 @@ +name: nucmer +description: NUCmer is a pipeline for the alignment of multiple closely related nucleotide sequences. +keywords: + - align + - nucleotide +tools: + - nucmer: + description: NUCmer is a pipeline for the alignment of multiple closely related nucleotide sequences. + homepage: http://mummer.sourceforge.net/ + documentation: http://mummer.sourceforge.net/ + tool_dev_url: http://mummer.sourceforge.net/ + doi: "https://doi.org/10.1186/gb-2004-5-2-r12" + licence: ['The Artistic License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - ref: + type: file + description: FASTA file of the reference sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - query: + type: file + description: FASTA file of the query sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - delta: + type: file + description: File containing coordinates of matches between reference and query + - coords: + type: file + description: NUCmer1.1 coords output file + pattern: "*.{coords}" + +authors: + - "@sateeshperi" + - "@mjcipriano" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 9d6be566..6c32a0ff 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -943,6 +943,10 @@ ngmaster: - modules/ngmaster/** - tests/modules/ngmaster/** +nucmer: + - modules/nucmer/** + - tests/modules/nucmer/** + optitype: - modules/optitype/** - tests/modules/optitype/** diff --git a/tests/modules/nucmer/main.nf b/tests/modules/nucmer/main.nf new file mode 100644 index 00000000..8021f577 --- /dev/null +++ b/tests/modules/nucmer/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NUCMER } from '../../../modules/nucmer/main.nf' addParams( options: [:] ) + +workflow test_nucmer { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + NUCMER ( input ) +} diff --git a/tests/modules/nucmer/test.yml b/tests/modules/nucmer/test.yml new file mode 100644 index 00000000..86b3df5d --- /dev/null +++ b/tests/modules/nucmer/test.yml @@ -0,0 +1,9 @@ +- name: nucmer test_nucmer + command: nextflow run tests/modules/nucmer -entry test_nucmer -c tests/config/nextflow.config + tags: + - nucmer + files: + - path: output/nucmer/test.coords + contains: ['MT192765.1'] + - path: output/nucmer/test.delta + contains: ['MT192765.1'] From bc8899f1bf298185a444511d73cdb9dd21ac10f4 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Tue, 16 Nov 2021 02:23:07 -0700 Subject: [PATCH 259/314] [fix] hicap module allow optional outputs (#937) * make hicap outputs optional * update test data * typo * Update main.nf * use mkfifo * Update main.nf Co-authored-by: Gisela Gabernet Co-authored-by: FriederikeHanssen Co-authored-by: Harshil Patel --- modules/hicap/main.nf | 7 +++---- tests/modules/hicap/main.nf | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/modules/hicap/main.nf b/modules/hicap/main.nf index e2e70678..fbc157b1 100644 --- a/modules/hicap/main.nf +++ b/modules/hicap/main.nf @@ -24,9 +24,9 @@ process HICAP { path model_fp output: - tuple val(meta), path("*.gbk"), emit: gbk - tuple val(meta), path("*.svg"), emit: svg - tuple val(meta), path("*.tsv"), emit: tsv + tuple val(meta), path("*.gbk"), emit: gbk, optional: true + tuple val(meta), path("*.svg"), emit: svg, optional: true + tuple val(meta), path("*.tsv"), emit: tsv, optional: true path "versions.yml" , emit: versions script: @@ -39,7 +39,6 @@ process HICAP { if [ "$is_compressed" == "true" ]; then gzip -c -d $fasta > $fasta_name fi - hicap \\ --query_fp $fasta_name \\ $database_args \\ diff --git a/tests/modules/hicap/main.nf b/tests/modules/hicap/main.nf index 77c309a5..3ac9c20b 100644 --- a/tests/modules/hicap/main.nf +++ b/tests/modules/hicap/main.nf @@ -7,7 +7,7 @@ include { HICAP } from '../../../modules/hicap/main.nf' addParams( options: [:] workflow test_hicap { input = [ [ id:'test', single_end:false ], // meta map - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species-specific/haemophilus-influenzae/GCF_900478275.fna.gz", checkIfExists: true) ] + file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/GCF_900478275.fna.gz", checkIfExists: true) ] database_dir = [] model_fp = [] From b50f4e3d59810447c10520c8dc9026c96d7c6470 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Tue, 16 Nov 2021 05:37:27 -0600 Subject: [PATCH 260/314] Update dsh-bio to 2.0.6. (#1075) Co-authored-by: Harshil Patel --- modules/dshbio/exportsegments/main.nf | 6 +++--- modules/dshbio/filterbed/main.nf | 6 +++--- modules/dshbio/filtergff3/main.nf | 6 +++--- modules/dshbio/splitbed/main.nf | 6 +++--- modules/dshbio/splitgff3/main.nf | 6 +++--- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index 84f59e89..ec471000 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -11,11 +11,11 @@ process DSHBIO_EXPORTSEGMENTS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 35039f21..9ad8ce8b 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -11,11 +11,11 @@ process DSHBIO_FILTERBED { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index bf677da8..bf729dbf 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -11,11 +11,11 @@ process DSHBIO_FILTERGFF3 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 3e8d656c..20e679f4 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -11,11 +11,11 @@ process DSHBIO_SPLITBED { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index dd477181..e0312a19 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -11,11 +11,11 @@ process DSHBIO_SPLITGFF3 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: From 691feeafdcd8b7874f540695ece122cce01a524f Mon Sep 17 00:00:00 2001 From: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> Date: Tue, 16 Nov 2021 15:09:30 +0100 Subject: [PATCH 261/314] GATK4 SplitNCigarReads: fasta_fai_dict tuple is now split into separate input channels (#1076) * fasta_fai_dict tuple is now split into separate input channels * fix: lint errors * fix: pytest errors * Update modules/gatk4/splitncigarreads/meta.yml * Update modules/gatk4/splitncigarreads/main.nf Co-authored-by: Maxime U. 
Garcia --- modules/gatk4/splitncigarreads/main.nf | 4 +++- modules/gatk4/splitncigarreads/meta.yml | 15 +++++++++++---- tests/modules/gatk4/splitncigarreads/main.nf | 9 ++++----- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 01b1d05a..26fb799d 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -20,7 +20,9 @@ process GATK4_SPLITNCIGARREADS { input: tuple val(meta), path(bam) - tuple path(fasta), path(fai), path(dict) + path fasta + path fai + path dict output: tuple val(meta), path('*.bam'), emit: bam diff --git a/modules/gatk4/splitncigarreads/meta.yml b/modules/gatk4/splitncigarreads/meta.yml index f287ede4..fd6edda0 100644 --- a/modules/gatk4/splitncigarreads/meta.yml +++ b/modules/gatk4/splitncigarreads/meta.yml @@ -24,10 +24,17 @@ input: description: BAM/SAM/CRAM file containing reads pattern: "*.{bam,sam,cram}" - fasta: - type: tuple of files - description: | - Tuple of fasta file (first), sequence dict (second) and fasta index (third) - pattern: ["*.fasta", "*.dict", "*.fai"] + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" output: - bam: type: file diff --git a/tests/modules/gatk4/splitncigarreads/main.nf b/tests/modules/gatk4/splitncigarreads/main.nf index 3e6bde80..0934593f 100644 --- a/tests/modules/gatk4/splitncigarreads/main.nf +++ b/tests/modules/gatk4/splitncigarreads/main.nf @@ -8,10 +8,9 @@ workflow test_gatk4_splitncigarreads { input = [ [ id:'test' ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] ] - fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) - ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) - GATK4_SPLITNCIGARREADS ( input, fasta ) + GATK4_SPLITNCIGARREADS ( input, fasta, fai, dict ) } From 2d4549122be989b36fe5299b292f4032601c865d Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Tue, 16 Nov 2021 08:49:57 -0600 Subject: [PATCH 262/314] feat: Add main.nf (#1077) Allows for launching of subworkflows --- main.nf | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 main.nf diff --git a/main.nf b/main.nf new file mode 100644 index 00000000..de12f619 --- /dev/null +++ b/main.nf @@ -0,0 +1,3 @@ +/* + * not actually used - just a placeholder + */ From 071b1d50a8272037f0c9c8485c18d3953ac703d6 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Wed, 17 Nov 2021 10:07:17 +0000 Subject: [PATCH 263/314] Add gatk somatic tumour calling subworkflow (#1064) * initial commit to set up new branch * save changes to checkout * workflow working, still needs test.yml and meta.yml, also fix versions file * subworkflow finished * Update pytest_subworkflows.yml * Update pytest_subworkflows.yml * Update pytest_subworkflows.yml * fix config subworkflow name * Update main.nf * Update 
pytest_subworkflows.yml * fixed md5sum issue likely caused by gatk version update * tumour changed to tumor * old dir deleted * Comments added to explain use of placeholders '[]' * updated index names, input channel renamed to input * Apply suggestions from code review * updated to perform new subworkflow testing Co-authored-by: GCJMackenzie Co-authored-by: Maxime U. Garcia --- .../main.nf | 88 ++++++++++++++ .../meta.yml | 108 ++++++++++++++++++ .../nextflow.config | 4 + tests/config/pytest_modules.yml | 16 ++- .../main.nf | 24 ++++ .../test.yml | 28 +++++ 6 files changed, 264 insertions(+), 4 deletions(-) create mode 100644 subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf create mode 100644 subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml create mode 100644 subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config create mode 100644 tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf create mode 100644 tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf new file mode 100644 index 00000000..20d8a176 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf @@ -0,0 +1,88 @@ +// +// Run GATK mutect2 in tumor only mode, getepileupsummaries, calculatecontamination and filtermutectcalls +// + +params.mutect2_options = [:] +params.getpileup_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [suffix: '_filtered'] + +include { GATK4_MUTECT2 as MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES } from '../../../modules/gatk4/getpileupsummaries/main' addParams( options: params.getpileup_options ) +include { GATK4_CALCULATECONTAMINATION as CALCULATECONTAMINATION } from '../../../modules/gatk4/calculatecontamination/main' addParams( options: params.calccontam_options ) +include { GATK4_FILTERMUTECTCALLS as FILTERMUTECTCALLS } from '../../../modules/gatk4/filtermutectcalls/main' addParams( options: params.filtercalls_options ) + +workflow GATK_TUMOR_ONLY_SOMATIC_VARIANT_CALLING { + take: + input // channel: [ val(meta), [ input ], [ input_index ], [] ] + fasta // channel: /path/to/reference/fasta + fai // channel: /path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + germline_resource // channel: /path/to/germline/resource + germline_resource_tbi // channel: /path/to/germline/index + panel_of_normals // channel: /path/to/panel/of/normals + panel_of_normals_tbi // channel: /path/to/panel/of/normals/index + interval_file // channel: /path/to/interval/file + + + main: + ch_versions = Channel.empty() + mutect2_input = channel.from(input) + + // + //Perform variant calling using mutect2 module in tumor single mode. + // + MUTECT2 ( mutect2_input , true , false , false , [] , fasta , fai , dict , germline_resource , germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + ch_versions = ch_versions.mix(MUTECT2.out.versions) + + // + //Generate pileup summary table using getepileupsummaries. 
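+    //Only the first BAM/CRAM and its index from the input lists are used below, since this workflow handles a single tumor sample.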
+ // + pileup_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[0], input_index[0]] + } + GETPILEUPSUMMARIES ( pileup_input , germline_resource , germline_resource_tbi , interval_file ) + ch_versions = ch_versions.mix(GETPILEUPSUMMARIES.out.versions) + + // + //Contamination and segmentation tables created using calculatecontamination on the pileup summary table. + // + ch_pileup = GETPILEUPSUMMARIES.out.table.collect() + //[] is a placeholder for the optional input where the matched normal sample would be passed in for tumor-normal samples, which is not necessary for this workflow. + ch_pileup.add([]) + CALCULATECONTAMINATION ( ch_pileup, true ) + ch_versions = ch_versions.mix(CALCULATECONTAMINATION.out.versions) + + // + //Mutect2 calls filtered by filtermutectcalls using the contamination and segmentation tables. + // + ch_vcf = MUTECT2.out.vcf.collect() + ch_tbi = MUTECT2.out.tbi.collect() + ch_stats = MUTECT2.out.stats.collect() + //[] is added as a placeholder for the optional input file artifact priors, which is only used for tumor-normal samples and therefor isn't needed in this workflow. + ch_stats.add([]) + ch_segment = CALCULATECONTAMINATION.out.segmentation.collect() + ch_contamination = CALCULATECONTAMINATION.out.contamination.collect() + //[] is added as a placeholder for entering a contamination estimate value, which is not needed as this workflow uses the contamination table instead. + ch_contamination.add([]) + ch_filtermutect_in = ch_vcf.combine(ch_tbi, by: 0).combine(ch_stats, by: 0).combine(ch_segment, by: 0).combine(ch_contamination, by: 0) + FILTERMUTECTCALLS ( ch_filtermutect_in, fasta, fai, dict ) + ch_versions = ch_versions.mix(FILTERMUTECTCALLS.out.versions) + + emit: + mutect2_vcf = MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_index = MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + pileup_table = GETPILEUPSUMMARIES.out.table.collect() // channel: [ val(meta), [ table ] ] + + contamination_table = CALCULATECONTAMINATION.out.contamination.collect() // channel: [ val(meta), [ contamination ] ] + segmentation_table = CALCULATECONTAMINATION.out.segmentation.collect() // channel: [ val(meta), [ segmentation ] ] + + filtered_vcf = FILTERMUTECTCALLS.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + filtered_index = FILTERMUTECTCALLS.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + filtered_stats = FILTERMUTECTCALLS.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml new file mode 100644 index 00000000..14329691 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml @@ -0,0 +1,108 @@ +name: gatk_tumor_only_somatic_variant_calling +description: | + Perform variant calling on a single tumor sample using mutect2 tumor only mode. + Run the input bam file through getpileupsummarries and then calculatecontaminationto get the contamination and segmentation tables. + Filter the mutect2 output vcf using filtermutectcalls and the contamination & segmentation tables for additional filtering. 
+keywords: + - gatk4 + - mutect2 + - getpileupsummaries + - calculatecontamination + - filtermutectcalls + - variant_calling + - tumor_only + - filtered_vcf +modules: + - gatk4/mutect2 + - gatk4/getpileupsummaries + - gatk4/calculatecontamination + - gatk4/filtermutectcalls +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input: + type: list + description: list containing one BAM file, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list containing one BAM file indexe, also able to take CRAM index as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - germline_resource: + type: file + description: Population vcf of germline sequencing, containing allele fractions. + pattern: "*.vcf.gz" + - germline_resource_tbi: + type: file + description: Index file for the germline resource. + pattern: "*.vcf.gz.tbi" + - panel_of_normals: + type: file + description: vcf file to be used as a panel of normals. + pattern: "*.vcf.gz" + - panel_of_normals_tbi: + type: file + description: Index for the panel of normals. + pattern: "*.vcf.gz.tbi" + - interval_file: + type: file + description: File containing intervals. + pattern: "*.interval_list" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: file + description: Compressed vcf file to be used for variant_calling. + pattern: "[ *.vcf.gz ]" + - mutect2_tbi: + type: file + description: Indexes of the mutect2_vcf file + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: file + description: Stats files for the mutect2 vcf + pattern: "[ *vcf.gz.stats ]" + - pileup_table: + type: file + description: File containing the pileup summary table. + pattern: "*.pileups.table" + - contamination_table: + type: file + description: File containing the contamination table. + pattern: "*.contamination.table" + - segmentation_table: + type: file + description: Output table containing segmentation of tumor minor allele fractions. + pattern: "*.segmentation.table" + - filtered_vcf: + type: file + description: file containing filtered mutect2 calls. + pattern: "*.vcf.gz" + - filtered_tbi: + type: file + description: tbi file that pairs with filtered vcf. + pattern: "*.vcf.gz.tbi" + - filtered_stats: + type: file + description: file containing statistics of the filtermutectcalls run. 
+ pattern: "*.filteringStats.tsv" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config new file mode 100644 index 00000000..af50c2b0 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config @@ -0,0 +1,4 @@ +params.mutect2_options = [:] +params.getpileup_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [:] diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6c32a0ff..29d07639 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -474,7 +474,7 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** -gatk4/calculatecontamination: +gatk4/calculatecontamination: &gatk4/calculatecontamination - modules/gatk4/calculatecontamination/** - tests/modules/gatk4/calculatecontamination/** @@ -494,7 +494,7 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** -gatk4/filtermutectcalls: +gatk4/filtermutectcalls: &gatk4/filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** @@ -506,7 +506,7 @@ gatk4/genotypegvcfs: - modules/gatk4/genotypegvcfs/** - tests/modules/gatk4/genotypegvcfs/** -gatk4/getpileupsummaries: +gatk4/getpileupsummaries: &gatk4/getpileupsummaries - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -538,7 +538,7 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** -gatk4/mutect2: +gatk4/mutect2: &gatk4/mutect2 - modules/gatk4/mutect2/** - tests/modules/gatk4/mutect2/** @@ -1426,3 +1426,11 @@ subworkflows/gatk_create_som_pon: - tests/subworkflows/nf-core/gatk_create_som_pon/** - *gatk4/genomicsdbimport - *gatk4/createsomaticpanelofnormals + +subworkflows/gatk_tumor_only_somatic_variant_calling: + - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** + - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** + - *gatk4/mutect2 + - *gatk4/getpileupsummaries + - *gatk4/calculatecontamination + - *gatk4/filtermutectcalls diff --git a/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf new file mode 100644 index 00000000..988dc31e --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK_TUMOR_ONLY_SOMATIC_VARIANT_CALLING } from '../../../../subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main' addParams( [:] ) + +workflow test_gatk_tumor_only_somatic_variant_calling { + input = [ + [[ id:'test' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + 
germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) + + GATK_TUMOR_ONLY_SOMATIC_VARIANT_CALLING ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi, interval_file ) +} diff --git a/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml new file mode 100644 index 00000000..797ae936 --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml @@ -0,0 +1,28 @@ +- name: gatk_tumor_only_somatic_variant_calling + command: nextflow run ./tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling -entry test_gatk_tumor_only_somatic_variant_calling -c tests/config/nextflow.config + tags: + - subworkflows/gatk_tumor_only_somatic_variant_calling + # Modules + # - gatk4/mutect2 + # - gatk4/getpileupsummaries + # - gatk4/calculatecontamination + # - gatk4/filtermutectcalls + files: + # gatk4 mutect2 + - path: ./output/mutect2/test.vcf.gz + - path: ./output/mutect2/test.vcf.gz.stats + md5sum: 106c5828b02b906c97922618b6072169 + - path: ./output/mutect2/test.vcf.gz.tbi + # gatk4 getpileupsummaries + - path: ./output/getpileupsummaries/test.pileups.table + md5sum: 8b1b4c8ab831eca50ee9e940463a741f + # gatk4 calculatecontamination + - path: ./output/calculatecontamination/test.contamination.table + md5sum: 5fdcf1728cf98985ce31c038eb24e05c + - path: ./output/calculatecontamination/test.segmentation.table + md5sum: 91f28bfe4727a3256810927fc5eba92f + # gatk4 filtermutectcalls + - path: ./output/filtermutectcalls/test_filtered.vcf.gz + - path: ./output/filtermutectcalls/test_filtered.vcf.gz.filteringStats.tsv + md5sum: 8731945490960546719ce4a71a151e4f + - path: ./output/filtermutectcalls/test_filtered.vcf.gz.tbi From 5b975cc20da2ebbc197be2203926f59d6a668153 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Wed, 17 Nov 2021 10:34:07 +0000 Subject: [PATCH 264/314] Add gatk somatic paired calling subworkflow (#1067) * initial commit to setup branch * workflow finished * Update nextflow.config * tumour to tumor, getpileup passed as nomral and tumor * paired_somatic renamed to tumor_normal_somatic * Apply suggestions from code review Co-authored-by: Maxime U. Garcia * Update subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf Co-authored-by: Maxime U. Garcia * updated index names in meta.yml * changed index file names in main script and test * Apply suggestions from code review Co-authored-by: Maxime U. Garcia * Apply suggestions from code review * fixed bug from changes * Apply suggestions from code review * tests should now work after the yml update * Update pytest_modules.yml Co-authored-by: GCJMackenzie Co-authored-by: Maxime U. Garcia Co-authored-by: Maxime U. 
Garcia --- .../main.nf | 109 +++++++++++++++ .../meta.yml | 127 ++++++++++++++++++ .../nextflow.config | 6 + tests/config/pytest_modules.yml | 11 +- .../main.nf | 25 ++++ .../test.yml | 34 +++++ 6 files changed, 311 insertions(+), 1 deletion(-) create mode 100644 subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf create mode 100644 subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml create mode 100644 subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config create mode 100644 tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf create mode 100644 tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf new file mode 100644 index 00000000..25c63687 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf @@ -0,0 +1,109 @@ +// +// Run GATK mutect2 in tumor normal mode, getepileupsummaries, calculatecontamination, learnreadorientationmodel and filtermutectcalls +// + +params.mutect2_options = [:] +params.learnorientation_options = [:] +params.getpileup_tumor_options = [suffix: '_tumor'] +params.getpileup_normal_options = [suffix: '_normal'] +params.calccontam_options = [:] +params.filtercalls_options = [suffix: '_filtered'] + +include { GATK4_MUTECT2 as MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_LEARNREADORIENTATIONMODEL as LEARNREADORIENTATIONMODEL } from '../../../modules/gatk4/learnreadorientationmodel/main' addParams( options: params.learnorientation_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES_TUMOR } from '../../../modules/gatk4/getpileupsummaries/main' addParams( options: params.getpileup_tumor_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES_NORMAL} from '../../../modules/gatk4/getpileupsummaries/main' addParams( options: params.getpileup_normal_options ) +include { GATK4_CALCULATECONTAMINATION as CALCULATECONTAMINATION } from '../../../modules/gatk4/calculatecontamination/main' addParams( options: params.calccontam_options ) +include { GATK4_FILTERMUTECTCALLS as FILTERMUTECTCALLS } from '../../../modules/gatk4/filtermutectcalls/main' addParams( options: params.filtercalls_options ) + +workflow GATK_TUMOR_NORMAL_SOMATIC_VARIANT_CALLING { + take: + input // channel: [ val(meta), [ input ], [ input_index ], [which_norm] ] + fasta // channel: /path/to/reference/fasta + fai // channel: /path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + germline_resource // channel: /path/to/germline/resource + germline_resource_tbi // channel: /path/to/germline/index + panel_of_normals // channel: /path/to/panel/of/normals + panel_of_normals_tbi // channel: /path/to/panel/of/normals/index + interval_file // channel: /path/to/interval/file + + + main: + ch_versions = Channel.empty() + + // + //Perform variant calling using mutect2 module in tumor single mode. + // + mutect2_input = channel.from(input) + MUTECT2 ( mutect2_input, false, false, false, [], fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + ch_versions = ch_versions.mix(MUTECT2.out.versions) + + // + //Generate artifactpriors using learnreadorientationmodel on the f1r2 output of mutect2. 
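+    //These artifact priors are passed on to filtermutectcalls further down to help filter read-orientation artifacts (e.g. FFPE deamination).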
+ // + ch_learnread_in = MUTECT2.out.f1r2.collect() + LEARNREADORIENTATIONMODEL (ch_learnread_in) + ch_versions = ch_versions.mix(LEARNREADORIENTATIONMODEL.out.versions) + + // + //Generate pileup summary tables using getpileupsummaries. The tumor sample should always be passed in as the first entry of the input and input_index lists, + //to ensure correct file order for calculatecontamination. + // + pileup_tumor_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[0], input_index[0]] + } + + pileup_normal_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[1], input_index[1]] + } + GETPILEUPSUMMARIES_TUMOR ( pileup_tumor_input, germline_resource, germline_resource_tbi, interval_file ) + GETPILEUPSUMMARIES_NORMAL ( pileup_normal_input, germline_resource, germline_resource_tbi, interval_file ) + ch_versions = ch_versions.mix(GETPILEUPSUMMARIES_NORMAL.out.versions) + + // + //Contamination and segmentation tables created using calculatecontamination on the pileup summary table. + // + ch_pileup_tumor = GETPILEUPSUMMARIES_TUMOR.out.table.collect() + ch_pileup_normal = GETPILEUPSUMMARIES_NORMAL.out.table.collect() + ch_calccon_in = ch_pileup_tumor.combine(ch_pileup_normal, by: 0) + CALCULATECONTAMINATION ( ch_calccon_in, true ) + ch_versions = ch_versions.mix(CALCULATECONTAMINATION.out.versions) + + // + //Mutect2 calls filtered by filtermutectcalls using the artifactpriors, contamination and segmentation tables. + // + ch_vcf = MUTECT2.out.vcf.collect() + ch_tbi = MUTECT2.out.tbi.collect() + ch_stats = MUTECT2.out.stats.collect() + ch_orientation = LEARNREADORIENTATIONMODEL.out.artifactprior.collect() + ch_segment = CALCULATECONTAMINATION.out.segmentation.collect() + ch_contamination = CALCULATECONTAMINATION.out.contamination.collect() + //[] is used as a placeholder for the optional input to specify the contamination estimate as a value; since the contamination table is used, this is not needed. 
+ ch_contamination.add([]) + ch_filtermutect_in = ch_vcf.combine(ch_tbi, by: 0).combine(ch_stats, by: 0).combine(ch_orientation, by: 0).combine(ch_segment, by: 0).combine(ch_contamination, by: 0) + FILTERMUTECTCALLS ( ch_filtermutect_in, fasta, fai, dict ) + ch_versions = ch_versions.mix(FILTERMUTECTCALLS.out.versions) + + emit: + mutect2_vcf = MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_tbi = MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + mutect2_f1r2 = MUTECT2.out.f1r2.collect() // channel: [ val(meta), [ f1r2 ] ] + + artifact_priors = LEARNREADORIENTATIONMODEL.out.artifactprior.collect() // channel: [ val(meta), [ artifactprior ] ] + + pileup_table_tumor = GETPILEUPSUMMARIES_TUMOR.out.table.collect() // channel: [ val(meta), [ table_tumor ] ] + pileup_table_normal = GETPILEUPSUMMARIES_NORMAL.out.table.collect() // channel: [ val(meta), [ table_normal ] ] + + contamination_table = CALCULATECONTAMINATION.out.contamination.collect() // channel: [ val(meta), [ contamination ] ] + segmentation_table = CALCULATECONTAMINATION.out.segmentation.collect() // channel: [ val(meta), [ segmentation ] ] + + filtered_vcf = FILTERMUTECTCALLS.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + filtered_tbi = FILTERMUTECTCALLS.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + filtered_stats = FILTERMUTECTCALLS.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml new file mode 100644 index 00000000..4c42addf --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml @@ -0,0 +1,127 @@ +name: gatk_tumor_normal_somatic_variant_calling +description: | + Perform variant calling on a paired tumor normal set of samples using mutect2 tumor normal mode. + The f1r2 output of mutect2 is run through learnreadorientationmodel to get the artifact priors. + Run the input bam files through getpileupsummaries and then calculatecontamination to get the contamination and segmentation tables. + Filter the mutect2 output vcf using filtermutectcalls, artifact priors and the contamination & segmentation tables for additional filtering. +keywords: + - gatk4 + - mutect2 + - learnreadorientationmodel + - getpileupsummaries + - calculatecontamination + - filtermutectcalls + - variant_calling + - tumor_normal + - filtered_vcf +modules: + - gatk4/mutect2 + - gatk4/learnreadorientationmodel + - gatk4/getpileupsummaries + - gatk4/calculatecontamination + - gatk4/filtermutectcalls +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input: + type: list + description: list containing the tumor and normal BAM files, in that order, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list containing the tumor and normal BAM file indexes, in that order, also able to take CRAM index as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - which_norm: + type: list + description: optional list of sample headers contained in the normal sample input file. 
+ pattern: "testN" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - germline_resource: + type: file + description: Population vcf of germline sequencing, containing allele fractions. + pattern: "*.vcf.gz" + - germline_resource_tbi: + type: file + description: Index file for the germline resource. + pattern: "*.vcf.gz.tbi" + - panel_of_normals: + type: file + description: vcf file to be used as a panel of normals. + pattern: "*.vcf.gz" + - panel_of_normals_tbi: + type: file + description: Index for the panel of normals. + pattern: "*.vcf.gz.tbi" + - interval_file: + type: file + description: File containing intervals. + pattern: "*.interval_list" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: file + description: Compressed vcf file to be used for variant_calling. + pattern: "[ *.vcf.gz ]" + - mutect2_tbi: + type: file + description: Indexes of the mutect2_vcf file + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: file + description: Stats files for the mutect2 vcf + pattern: "[ *vcf.gz.stats ]" + - mutect2_f1r2: + type: file + description: file containing information to be passed to LearnReadOrientationModel. + pattern: "*.f1r2.tar.gz" + - artifact_priors: + type: file + description: file containing artifact-priors to be used by filtermutectcalls. + pattern: "*.tar.gz" + - pileup_table_tumor: + type: file + description: File containing the tumor pileup summary table, kept separate as calculatecontamination needs them individually specified. + pattern: "*_tumor.pileups.table" + - pileup_table_normal: + type: file + description: File containing the normal pileup summary table, kept separate as calculatecontamination needs them individually specified. + pattern: "*_normal.pileups.table" + - contamination_table: + type: file + description: File containing the contamination table. + pattern: "*.contamination.table" + - segmentation_table: + type: file + description: Output table containing segmentation of tumor minor allele fractions. + pattern: "*.segmentation.table" + - filtered_vcf: + type: file + description: file containing filtered mutect2 calls. + pattern: "*.vcf.gz" + - filtered_tbi: + type: file + description: tbi file that pairs with filtered vcf. + pattern: "*.vcf.gz.tbi" + - filtered_stats: + type: file + description: file containing statistics of the filtermutectcalls run. 
+ pattern: "*.filteringStats.tsv" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config new file mode 100644 index 00000000..bb8d1bc4 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config @@ -0,0 +1,6 @@ +params.mutect2_options = [:] +params.learnorientation_options = [:] +params.getpileup_tumor_options = [:] +params.getpileup_normal_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [:] diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 29d07639..9ed9f55c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -522,7 +522,7 @@ gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** -gatk4/learnreadorientationmodel: +gatk4/learnreadorientationmodel: &gatk4/learnreadorientationmodel - modules/gatk4/learnreadorientationmodel/** - tests/modules/gatk4/learnreadorientationmodel/** @@ -1427,6 +1427,15 @@ subworkflows/gatk_create_som_pon: - *gatk4/genomicsdbimport - *gatk4/createsomaticpanelofnormals +subworkflows/gatk_tumor_normal_somatic_variant_calling: + - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** + - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** + - *gatk4/mutect2 + - *gatk4/learnreadorientationmodel + - *gatk4/getpileupsummaries + - *gatk4/calculatecontamination + - *gatk4/filtermutectcalls + subworkflows/gatk_tumor_only_somatic_variant_calling: - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** diff --git a/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf new file mode 100644 index 00000000..21e35998 --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK_TUMOR_NORMAL_SOMATIC_VARIANT_CALLING } from '../../../../subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main' addParams( [:] ) + +workflow test_gatk_tumor_normal_somatic_variant_calling { + input = [ + [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + ["testN"] + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = 
file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) + + GATK_TUMOR_NORMAL_SOMATIC_VARIANT_CALLING ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi, interval_file ) +} diff --git a/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml new file mode 100644 index 00000000..3c6753fb --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml @@ -0,0 +1,34 @@ +- name: gatk_tumor_normal_somatic_variant_calling + command: nextflow run ./tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling -entry test_gatk_tumor_normal_somatic_variant_calling -c tests/config/nextflow.config + tags: + - subworkflows/gatk_tumor_normal_somatic_variant_calling + # Modules + # - gatk4/mutect2 + # - gatk4/learnreadorientationmodel + # - gatk4/getpileupsummaries + # - gatk4/calculatecontamination + # - gatk4/filtermutectcalls + files: + # gatk4 mutect2 + - path: ./output/mutect2/test.vcf.gz + - path: ./output/mutect2/test.vcf.gz.stats + md5sum: 6ecb874e6a95aa48233587b876c2a7a9 + - path: ./output/mutect2/test.vcf.gz.tbi + - path: ./output/mutect2/test.f1r2.tar.gz + # gatk4 learnreadorientationmodel + - path: ./output/learnreadorientationmodel/test.tar.gz + # gatk4 getpileupsummaries + - path: ./output/getpileupsummaries/test_tumor.pileups.table + md5sum: 8b1b4c8ab831eca50ee9e940463a741f + - path: ./output/getpileupsummaries/test_normal.pileups.table + md5sum: 0d19674bef2ff0700d5b02b3463dd210 + # gatk4 calculatecontamination + - path: ./output/calculatecontamination/test.contamination.table + md5sum: 5fdcf1728cf98985ce31c038eb24e05c + - path: ./output/calculatecontamination/test.segmentation.table + md5sum: 91f28bfe4727a3256810927fc5eba92f + # gatk4 filtermutectcalls + - path: ./output/filtermutectcalls/test_filtered.vcf.gz + - path: ./output/filtermutectcalls/test_filtered.vcf.gz.filteringStats.tsv + md5sum: 98e1b87a52999eb8f429ef4a7877eb3f + - path: ./output/filtermutectcalls/test_filtered.vcf.gz.tbi From f052dc445c8f1f6791ae0a15530300a94b9f7d2c Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Thu, 18 Nov 2021 11:09:09 -0700 Subject: [PATCH 265/314] use underscores in anchors and references (#1080) * use underscores in anchors and references * Dummy change to trigger CI * use dev branch * underscore anchor --- .github/workflows/nf-core-linting.yml | 2 +- .../nf-core/gatk_create_som_pon/main.nf | 1 - tests/config/pytest_modules.yml | 60 +++++++++---------- 3 files changed, 31 insertions(+), 32 deletions(-) diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml index ce441413..55b8c296 100644 --- a/.github/workflows/nf-core-linting.yml +++ b/.github/workflows/nf-core-linting.yml @@ -60,7 +60,7 @@ jobs: # FIXME: Remove this when nf-core modules lint stabilizes and install stable release - name: Install nf-core tools development version - run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@subworkflow_hacks + run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev - name: Install Nextflow env: diff --git a/subworkflows/nf-core/gatk_create_som_pon/main.nf b/subworkflows/nf-core/gatk_create_som_pon/main.nf index 40269a4a..89a9566e 100644 --- a/subworkflows/nf-core/gatk_create_som_pon/main.nf +++ b/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -1,7 +1,6 @@ // // Run GATK mutect2, genomicsdbimport and createsomaticpanelofnormals // - params.mutect2_options = [args: '--max-mnp-distance 0'] params.gendbimport_options = [:] params.createsompon_options = [:] diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 9ed9f55c..daa48bc2 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -474,7 +474,7 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** -gatk4/calculatecontamination: &gatk4/calculatecontamination +gatk4/calculatecontamination: &gatk4_calculatecontamination - modules/gatk4/calculatecontamination/** - tests/modules/gatk4/calculatecontamination/** @@ -482,7 +482,7 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** -gatk4/createsomaticpanelofnormals: &gatk4/createsomaticpanelofnormals +gatk4/createsomaticpanelofnormals: &gatk4_createsomaticpanelofnormals - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** @@ -494,11 +494,11 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** -gatk4/filtermutectcalls: &gatk4/filtermutectcalls +gatk4/filtermutectcalls: &gatk4_filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** -gatk4/genomicsdbimport: &gatk4/genomicsdbimport +gatk4/genomicsdbimport: &gatk4_genomicsdbimport - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** @@ -506,7 +506,7 @@ gatk4/genotypegvcfs: - modules/gatk4/genotypegvcfs/** - tests/modules/gatk4/genotypegvcfs/** -gatk4/getpileupsummaries: &gatk4/getpileupsummaries +gatk4/getpileupsummaries: &gatk4_getpileupsummaries - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -522,7 +522,7 @@ gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** -gatk4/learnreadorientationmodel: &gatk4/learnreadorientationmodel +gatk4/learnreadorientationmodel: &gatk4_learnreadorientationmodel - modules/gatk4/learnreadorientationmodel/** - 
tests/modules/gatk4/learnreadorientationmodel/** @@ -538,7 +538,7 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** -gatk4/mutect2: &gatk4/mutect2 +gatk4/mutect2: &gatk4_mutect2 - modules/gatk4/mutect2/** - tests/modules/gatk4/mutect2/** @@ -1187,7 +1187,7 @@ samtools/idxstats: - modules/samtools/idxstats/** - tests/modules/samtools/idxstats/** -samtools/index: &samtools/index +samtools/index: &samtools_index - modules/samtools/index/** - tests/modules/samtools/index/** @@ -1199,7 +1199,7 @@ samtools/mpileup: - modules/samtools/mpileup/** - tests/modules/samtools/mpileup/** -samtools/sort: &samtools/sort +samtools/sort: &samtools_sort - modules/samtools/sort/** - tests/modules/samtools/sort/** @@ -1275,11 +1275,11 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/fasterqdump: &sratools/fasterqdump +sratools/fasterqdump: &sratools_fasterqdump - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** -sratools/prefetch: &sratools/prefetch +sratools/prefetch: &sratools_prefetch - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** @@ -1399,47 +1399,47 @@ yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** -subworkflows/bam_stats_samtools: &subworkflows/bam_stats_samtools +subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools - subworkflows/nf-core/bam_stats_samtools/** - tests/subworkflows/nf-core/bam_stats_samtools/** -subworkflows/bam_sort_samtools: &subworkflows/bam_sort_samtools +subworkflows/bam_sort_samtools: &subworkflows_bam_sort_samtools - subworkflows/nf-core/bam_sort_samtools/** - tests/subworkflows/nf-core/bam_sort_samtools/** - - *samtools/sort - - *samtools/index - - *subworkflows/bam_stats_samtools + - *samtools_sort + - *samtools_index + - *subworkflows_bam_stats_samtools subworkflows/align_bowtie2: - subworkflows/nf-core/align_bowtie2/** - tests/subworkflows/nf-core/align_bowtie2/** - - *subworkflows/bam_sort_samtools + - *subworkflows_bam_sort_samtools subworkflows/sra_fastq: - subworkflows/nf-core/sra_fastq/** - tests/subworkflows/nf-core/sra_fastq/** - - *sratools/fasterqdump - - *sratools/prefetch + - *sratools_fasterqdump + - *sratools_prefetch subworkflows/gatk_create_som_pon: - subworkflows/nf-core/gatk_create_som_pon/** - tests/subworkflows/nf-core/gatk_create_som_pon/** - - *gatk4/genomicsdbimport - - *gatk4/createsomaticpanelofnormals + - *gatk4_genomicsdbimport + - *gatk4_createsomaticpanelofnormals subworkflows/gatk_tumor_normal_somatic_variant_calling: - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** - - *gatk4/mutect2 - - *gatk4/learnreadorientationmodel - - *gatk4/getpileupsummaries - - *gatk4/calculatecontamination - - *gatk4/filtermutectcalls + - *gatk4_mutect2 + - *gatk4_learnreadorientationmodel + - *gatk4_getpileupsummaries + - *gatk4_calculatecontamination + - *gatk4_filtermutectcalls subworkflows/gatk_tumor_only_somatic_variant_calling: - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - - *gatk4/mutect2 - - *gatk4/getpileupsummaries - - *gatk4/calculatecontamination - - *gatk4/filtermutectcalls + - *gatk4_mutect2 + - *gatk4_getpileupsummaries + - *gatk4_calculatecontamination + - *gatk4_filtermutectcalls From 43a1c1c6d18d7d9395846503046f1649ba72a06e Mon Sep 17 00:00:00 2001 From: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> Date: Thu, 18 Nov 
2021 21:47:40 +0100 Subject: [PATCH 266/314] Update in GATK4 variantfiltration: Added vcf_index to vcf tuple; output to vcf.gz format. (#1083) * Added vcf_index to vcf tuple; output to vcf.gz format. * Fix: extra new line in meta.yml. * addressed review feedback * fix: editorconfig error * fix: gatk memory flag * fix: editorconfig error * fix: Indentation fix: Indentation * Fix: lint editorconfig error Removed one extra space Co-authored-by: Robert A. Petit III --- modules/gatk4/variantfiltration/main.nf | 20 ++++++++---- modules/gatk4/variantfiltration/meta.yml | 16 +++++++--- tests/modules/gatk4/variantfiltration/main.nf | 32 +++++++++++++++---- .../modules/gatk4/variantfiltration/test.yml | 20 +++++++++--- 4 files changed, 65 insertions(+), 23 deletions(-) diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index a4e950ae..e0f0727a 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -19,23 +19,29 @@ process GATK4_VARIANTFILTRATION { } input: - tuple val(meta), path(vcf) + tuple val(meta), path(vcf), path(vcf_tbi) path fasta path fai path dict output: - tuple val(meta), path("*.vcf"), emit: vcf - path "versions.yml" , emit: versions - + tuple val(meta), path("*.vcf.gz"), emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.toGiga() + } """ - gatk VariantFiltration \\ + gatk --java-options "-Xmx${avail_mem}G" VariantFiltration \\ -R $fasta \\ -V $vcf \\ - -O ${prefix}.vcf \\ + -O ${prefix}.vcf.gz \\ $options.args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/variantfiltration/meta.yml b/modules/gatk4/variantfiltration/meta.yml index 6d4983a6..71f0b8b2 100644 --- a/modules/gatk4/variantfiltration/meta.yml +++ b/modules/gatk4/variantfiltration/meta.yml @@ -21,8 +21,12 @@ input: e.g. 
[ id:'test'] - vcf: type: list - description: Input VCF file - pattern: "*.{vcf}" + description: List of VCF(.gz) files + pattern: "*.{vcf,vcf.gz}" + - vcf_tbi: + type: list + description: List of VCF file indexes + pattern: "*.{idx,tbi}" - fasta: type: file description: Fasta file of reference genome @@ -38,8 +42,12 @@ input: output: - vcf: type: file - description: filtered VCF file - pattern: "*.filtered.{vcf}" + description: Compressed VCF file + pattern: "*.vcf.gz" + - tbi: + type: file + description: Index of VCF file + pattern: "*.vcf.gz.tbi" - versions: type: file description: File containing software versions diff --git a/tests/modules/gatk4/variantfiltration/main.nf b/tests/modules/gatk4/variantfiltration/main.nf index 04bebf6f..67c9daec 100644 --- a/tests/modules/gatk4/variantfiltration/main.nf +++ b/tests/modules/gatk4/variantfiltration/main.nf @@ -5,14 +5,32 @@ nextflow.enable.dsl = 2 test_options = ['args': '--filter-name "test_filter" --filter-expression "MQ0 > 0"', 'suffix': '.filtered'] include { GATK4_VARIANTFILTRATION } from '../../../../modules/gatk4/variantfiltration/main.nf' addParams( options: test_options ) -workflow test_gatk4_variantfiltration { +// Basic parameters with uncompressed VCF input +workflow test_gatk4_variantfiltration_vcf_input { + input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] - ] - fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] - fai = [ file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) ] - genome_dict = [ file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) ] + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_VARIANTFILTRATION ( input, fasta, fai, genome_dict ) + GATK4_VARIANTFILTRATION ( input, fasta, fastaIndex, fastaDict ) } + +// Basic parameters with compressed VCF input +workflow test_gatk4_variantfiltration_gz_input { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_VARIANTFILTRATION ( input, fasta, fastaIndex, fastaDict ) +} + + diff --git a/tests/modules/gatk4/variantfiltration/test.yml b/tests/modules/gatk4/variantfiltration/test.yml index 1a2bf6d2..e3177cfc 100644 --- a/tests/modules/gatk4/variantfiltration/test.yml +++ b/tests/modules/gatk4/variantfiltration/test.yml @@ -1,9 +1,19 @@ -- name: gatk4 variantfiltration test_gatk4_variantfiltration - command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration -c tests/config/nextflow.config +- name: gatk4 variantfiltration 
test_gatk4_variantfiltration_vcf_input + command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_vcf_input -c tests/config/nextflow.config tags: - gatk4/variantfiltration - gatk4 files: - - path: output/gatk4/test.filtered.vcf - contains: - - "AC=2;AN=2;DP=1;DP4=0,0,1,0;MQ=60;MQ0F=0;SGB=-0.379885" + - path: output/gatk4/test.filtered.vcf.gz + contains: ['BaseQRankSum=-1.318;DP=17;ExcessHet=3.0103;MLEAC=1,0,0;MLEAF=0.500,0.00,0.00;MQRankSum=0.000;RAW_MQandDP=61200,17;ReadPosRankSum=2.365'] + - path: output/gatk4/test.filtered.vcf.gz.tbi + +- name: gatk4 variantfiltration test_gatk4_variantfiltration_gz_input + command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_gz_input -c tests/config/nextflow.config + tags: + - gatk4/variantfiltration + - gatk4 + files: + - path: output/gatk4/test.filtered.vcf.gz + contains: ['BaseQRankSum=-1.318;DP=17;ExcessHet=3.0103;MLEAC=1,0,0;MLEAF=0.500,0.00,0.00;MQRankSum=0.000;RAW_MQandDP=61200,17;ReadPosRankSum=2.365'] + - path: output/gatk4/test.filtered.vcf.gz.tbi From 4e5b6ed843ee0691848aba58088e4347cd3aae98 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Thu, 18 Nov 2021 21:54:14 +0100 Subject: [PATCH 267/314] Fix picard markduplicates (#1084) * Fix picard/markduplicates with new options syntax * Delete md5sum for bam files and add contains for metrics.txt Co-authored-by: Robert A. Petit III Co-authored-by: FriederikeHanssen --- modules/picard/markduplicates/main.nf | 6 +++--- tests/modules/picard/markduplicates/main.nf | 2 +- tests/modules/picard/markduplicates/test.yml | 6 ++++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index 37b825d7..130a1e52 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -40,9 +40,9 @@ process PICARD_MARKDUPLICATES { -Xmx${avail_mem}g \\ MarkDuplicates \\ $options.args \\ - -I $bam \\ - -O ${prefix}.bam \\ - -M ${prefix}.MarkDuplicates.metrics.txt + I=$bam \\ + O=${prefix}.bam \\ + M=${prefix}.MarkDuplicates.metrics.txt cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/tests/modules/picard/markduplicates/main.nf b/tests/modules/picard/markduplicates/main.nf index 78643f8b..7c9c63cd 100644 --- a/tests/modules/picard/markduplicates/main.nf +++ b/tests/modules/picard/markduplicates/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 include { PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [:] ) -include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [args : '--ASSUME_SORT_ORDER queryname' ] ) +include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [args : 'ASSUME_SORT_ORDER=queryname' ] ) workflow test_picard_markduplicates_sorted_bam { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/markduplicates/test.yml b/tests/modules/picard/markduplicates/test.yml index 04075548..4c314814 100644 --- a/tests/modules/picard/markduplicates/test.yml +++ b/tests/modules/picard/markduplicates/test.yml @@ -5,8 +5,9 @@ - picard/markduplicates files: - path: ./output/picard/test.MarkDuplicates.metrics.txt + contains: + - "1.0 97 97" - path: ./output/picard/test.bam - md5sum: b520ccdc3a9edf3c6a314983752881f2 - name: picard 
markduplicates unsorted bam command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c tests/config/nextflow.config tags: @@ -14,6 +15,7 @@ - picard/markduplicates files: - path: ./output/picard/test.MarkDuplicates.metrics.txt + contains: + - "1.0 97 97" - path: ./output/picard/test.bam - md5sum: 46a6fc76048ba801d328f869ac9db020 From 19035c99d1cd8412b5046c6bb4af787658b84e5b Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Thu, 18 Nov 2021 22:24:47 +0100 Subject: [PATCH 268/314] Add thread option to samtools modules (#1069) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add thread option to samtools modules * fix tests * fix tests * fix tests * Fix naming to fix tests * cpus-1 to account for mainthread * remove thread in ampliconclip, docu doesn't report this param * add -1 to all other applicable samtools modules * Update samtools version * Update checksums * retrigger GHA after update * Update modules/samtools/fastq/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/fixmate/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/flagstat/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/index/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/merge/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/stats/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/view/main.nf Co-authored-by: Patrick Hüther * Fix md5sum fixmate * Fix md5sums * sth funny with the fixmate checksums * more md5sums updates Co-authored-by: Patrick Hüther --- modules/samtools/ampliconclip/main.nf | 7 +++---- modules/samtools/faidx/main.nf | 6 +++--- modules/samtools/fastq/main.nf | 10 +++++----- modules/samtools/fixmate/main.nf | 2 +- modules/samtools/flagstat/main.nf | 8 ++++---- modules/samtools/idxstats/main.nf | 6 +++--- modules/samtools/index/main.nf | 9 +++++---- modules/samtools/merge/main.nf | 9 +++++---- modules/samtools/mpileup/main.nf | 6 +++--- modules/samtools/sort/main.nf | 6 +++--- modules/samtools/stats/main.nf | 9 +++++---- modules/samtools/view/main.nf | 15 ++++++++------- tests/modules/samtools/ampliconclip/test.yml | 12 ++++++------ tests/modules/samtools/fixmate/test.yml | 4 ++-- tests/modules/samtools/index/test.yml | 6 +++--- tests/modules/samtools/merge/test.yml | 4 ++-- tests/modules/samtools/sort/test.yml | 2 +- tests/modules/samtools/stats/main.nf | 6 +++--- tests/modules/samtools/stats/test.yml | 8 ++++---- .../subworkflows/nf-core/align_bowtie2/test.yml | 4 ++-- .../nf-core/bam_sort_samtools/test.yml | 16 ++++++++-------- 21 files changed, 79 insertions(+), 76 deletions(-) diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 3da1d6fe..4cf98d3f 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_AMPLICONCLIP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.13" : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -38,7 +38,6 @@ process SAMTOOLS_AMPLICONCLIP { samtools \\ ampliconclip \\ $options.args \\ - -@ $task.cpus \\ $rejects \\ $stats \\ -b $bed \\ diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index 80cedeab..80708084 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FAIDX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 0b454360..fb7e3904 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FASTQ { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -23,7 +23,7 @@ process SAMTOOLS_FASTQ { output: tuple val(meta), path("*.fastq.gz"), emit: fastq - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" @@ -32,7 +32,7 @@ process SAMTOOLS_FASTQ { """ samtools fastq \\ $options.args \\ - -@ $task.cpus \\ + --threads ${task.cpus-1} \\ $endedness \\ $bam cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index e1a766a1..af1cf829 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -33,7 +33,7 @@ process SAMTOOLS_FIXMATE { samtools \\ fixmate \\ $options.args \\ - -@ $task.cpus \\ + --threads ${task.cpus-1} \\ $bam \\ ${prefix}.bam \\ diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index f9115c6b..072a135f 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FLAGSTAT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -27,7 +27,7 @@ process SAMTOOLS_FLAGSTAT { script: """ - samtools flagstat $bam > ${bam}.flagstat + samtools flagstat --threads ${task.cpus-1} $bam > ${bam}.flagstat cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index b005088a..fa0e7dc3 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_IDXSTATS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index 62254bc8..d66e4513 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_INDEX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -29,7 +29,8 @@ process SAMTOOLS_INDEX { script: """ - samtools index $options.args $input + samtools index -@ ${task.cpus-1} $options.args $input + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index fefb423b..ab641bb9 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_MERGE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -32,7 +32,8 @@ process SAMTOOLS_MERGE { def file_type = input_files[0].getExtension() def reference = fasta ? "--reference ${fasta}" : "" """ - samtools merge ${reference} ${prefix}.${file_type} $input_files + samtools merge --threads ${task.cpus-1} $options.args ${reference} ${prefix}.${file_type} $input_files + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 9e120526..081682ed 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_MPILEUP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index b30f6f45..f980b472 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_SORT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index aab43410..e0a2b50d 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_STATS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -29,7 +29,8 @@ process SAMTOOLS_STATS { script: def reference = fasta ? "--reference ${fasta}" : "" """ - samtools stats ${reference} ${input} > ${input}.stats + samtools stats --threads ${task.cpus-1} ${reference} ${input} > ${input}.stats + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index b7a047ee..e5ff5546 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_VIEW { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -23,16 +23,17 @@ process SAMTOOLS_VIEW { path fasta output: - tuple val(meta), path("*.bam") , optional: true, emit: bam - tuple val(meta), path("*.cram"), optional: true, emit: cram - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam") , emit: bam , optional: true + tuple val(meta), path("*.cram"), emit: cram, optional: true + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def reference = fasta ? "--reference ${fasta} -C" : "" def file_type = input.getExtension() """ - samtools view ${reference} $options.args $input > ${prefix}.${file_type} + samtools view --threads ${task.cpus-1} ${reference} $options.args $input > ${prefix}.${file_type} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/tests/modules/samtools/ampliconclip/test.yml b/tests/modules/samtools/ampliconclip/test.yml index 9e41ce5b..9e8e1f9f 100644 --- a/tests/modules/samtools/ampliconclip/test.yml +++ b/tests/modules/samtools/ampliconclip/test.yml @@ -7,7 +7,7 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 1c705ebe39f68f1dac164733ae99c9d2 + md5sum: 678f9ab04fbe3206f0f96e170fd833e9 - name: samtools ampliconclip no stats with rejects command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c tests/config/nextflow.config @@ -16,9 +16,9 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 86c7bfb5378d57b16855c5b399000b2a + md5sum: bbf65ea626539d96c8271e17d1fc988b - path: output/samtools/test.cliprejects.bam - md5sum: 8e2eea2c0005b4d4e77c0eb549599133 + md5sum: a0bee15aead020d16d0c81bd9667df46 - name: samtools ampliconclip with stats with rejects command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c tests/config/nextflow.config @@ -27,8 +27,8 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: d96f5eebef0ff4635e68090e89756d4a + md5sum: f5a3611ecad34ba2dde77096e1c7dd93 - path: output/samtools/test.cliprejects.bam - md5sum: ad83a523d6ff1c58caade4ddafbaaed7 + md5sum: 90ee7ce908b4bdb89ab41e4410de9012 - path: output/samtools/test.clipstats.txt - md5sum: 6fbde83d658cd2813b79900d33800d1d + md5sum: fc23355e1743d47f2541f2cb1a7a0cda diff --git a/tests/modules/samtools/fixmate/test.yml b/tests/modules/samtools/fixmate/test.yml index c7864c04..0b3aa2a9 100644 --- a/tests/modules/samtools/fixmate/test.yml +++ b/tests/modules/samtools/fixmate/test.yml @@ -1,8 +1,8 @@ - name: samtools fixmate test_samtools_fixmate command: nextflow run tests/modules/samtools/fixmate -entry test_samtools_fixmate -c tests/config/nextflow.config tags: - - samtools/fixmate - samtools + - samtools/fixmate files: - path: output/samtools/test.bam - md5sum: 92c8463710cdcaef2010aa02ed9e01fd + md5sum: a4092657a4b17170c7702a76cbf192a1 diff --git a/tests/modules/samtools/index/test.yml 
b/tests/modules/samtools/index/test.yml index 66ab8211..279b99d8 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -1,4 +1,4 @@ -- name: samtools index bai +- name: samtools index test_samtools_index_bai command: nextflow run tests/modules/samtools/index -entry test_samtools_index_bai -c tests/config/nextflow.config tags: - samtools @@ -7,7 +7,7 @@ - path: output/samtools/test.paired_end.sorted.bam.bai md5sum: 704c10dd1326482448ca3073fdebc2f4 -- name: samtools index crai +- name: samtools index test_samtools_index_crai command: nextflow run tests/modules/samtools/index -entry test_samtools_index_crai -c tests/config/nextflow.config tags: - samtools @@ -16,7 +16,7 @@ - path: output/samtools/test.paired_end.recalibrated.sorted.cram.crai md5sum: 537e3d8c937bcc4e34e1cf47cd71d484 -- name: samtools index csi +- name: samtools index test_samtools_index_csi command: nextflow run tests/modules/samtools/index -entry test_samtools_index_csi -c tests/config/nextflow.config tags: - samtools diff --git a/tests/modules/samtools/merge/test.yml b/tests/modules/samtools/merge/test.yml index b39ca2ec..f04aa74b 100644 --- a/tests/modules/samtools/merge/test.yml +++ b/tests/modules/samtools/merge/test.yml @@ -1,15 +1,15 @@ - name: samtools merge test_samtools_merge command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge -c tests/config/nextflow.config tags: - - samtools/merge - samtools + - samtools/merge files: - path: output/samtools/test_merged.bam - name: samtools merge test_samtools_merge_cram command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge_cram -c tests/config/nextflow.config tags: - - samtools/merge - samtools + - samtools/merge files: - path: output/samtools/test_merged.cram diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index 12e6669f..785ec03b 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -5,4 +5,4 @@ - samtools/sort files: - path: output/samtools/test.sorted.bam - md5sum: bbb2db225f140e69a4ac577f74ccc90f + md5sum: 4adc495469724a375d5e1a9f3485e38d diff --git a/tests/modules/samtools/stats/main.nf b/tests/modules/samtools/stats/main.nf index 8e8b0c88..4e92b366 100644 --- a/tests/modules/samtools/stats/main.nf +++ b/tests/modules/samtools/stats/main.nf @@ -14,9 +14,9 @@ workflow test_samtools_stats { } workflow test_samtools_stats_cram { - input = [ [ id: 'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + input = [ [ id: 'test', single_end:true ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index a194c666..c186665a 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -1,17 +1,17 @@ - name: samtools stats test_samtools_stats command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats -c tests/config/nextflow.config tags: - - samtools - samtools/stats + 
- samtools files: - path: output/samtools/test.paired_end.sorted.bam.stats - md5sum: a7f36cf11fd3bf97e0a0ae29c0627296 + md5sum: 09146eeecfcae2a84fb8615c86cd8d64 - name: samtools stats test_samtools_stats_cram command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats_cram -c tests/config/nextflow.config tags: - - samtools - samtools/stats + - samtools files: - path: output/samtools/test.paired_end.recalibrated.sorted.cram.stats - md5sum: bd55a1da30028403f4b66dacf7a2a20e + md5sum: 62377b29c3f6253e37308a28d13a496d diff --git a/tests/subworkflows/nf-core/align_bowtie2/test.yml b/tests/subworkflows/nf-core/align_bowtie2/test.yml index 116ea961..07c0b1b4 100644 --- a/tests/subworkflows/nf-core/align_bowtie2/test.yml +++ b/tests/subworkflows/nf-core/align_bowtie2/test.yml @@ -37,7 +37,7 @@ - path: ./output/samtools/test.sorted.bam.idxstats md5sum: e16eb632f7f462514b0873c7ac8ac905 - path: ./output/samtools/test.sorted.bam.stats - md5sum: 2d837cd72432cd856fca70d33f02ffb5 + md5sum: d9eb909c2cde69d6ae83999a72d770d7 - name: align bowtie2 paired-end command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_paired_end -c tests/config/nextflow.config @@ -78,4 +78,4 @@ - path: ./output/samtools/test.sorted.bam.idxstats md5sum: 29ff2fa56d35b2a47625b8f517f1a947 - path: ./output/samtools/test.sorted.bam.stats - md5sum: 98aa88a39d26244c89bd4e577953fb48 + md5sum: d0c7a1a4fbd2c1aed437ca419a9e344f diff --git a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml index 88ea9d5a..b84735e5 100644 --- a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml @@ -4,15 +4,15 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - # - samtools - # - samtools/index - # - samtools/sort - # - samtools/stats - # - samtools/idxstats - # - samtools/flagstat + - samtools + - samtools/index + - samtools/sort + - samtools/stats + - samtools/idxstats + - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam - md5sum: e4c77897d6824ce4df486d1b100618af + md5sum: 8b56bb7d26ced04112f712250d915aaa - path: ./output/samtools/test.sorted.bam.bai md5sum: a70940ce9ba2e700ec2984e0a6526099 # samtools stats @@ -36,7 +36,7 @@ # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam - md5sum: bbb2db225f140e69a4ac577f74ccc90f + md5sum: 4adc495469724a375d5e1a9f3485e38d - path: ./output/samtools/test.sorted.bam.bai md5sum: 20c91e3a0fd4661d7cb967f40d2486ba # samtools stats From 72c94dbed93cf7b5cc8f937e73a2caefd8ae8c04 Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Fri, 19 Nov 2021 08:33:29 +0100 Subject: [PATCH 269/314] Add new module: Das Tool (#1004) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. 
Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * add dastool/scaffolds2bin * add dastool * remove non reproducible md5sum check for compressed files * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * split tests * export env variable * remove metabat2 from PR * fix linting errors * remove traling whitespace * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip * PR review updates * update from PR review * update test files * add bacillus fragilis alignments * switch tests to bacillus fragilis * add string check * update test string * add pr comment answer * last fixes for PR review Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- modules/dastool/dastool/functions.nf | 78 +++++++++++++++ modules/dastool/dastool/main.nf | 73 ++++++++++++++ modules/dastool/dastool/meta.yml | 100 +++++++++++++++++++ modules/dastool/scaffolds2bin/functions.nf | 78 +++++++++++++++ modules/dastool/scaffolds2bin/main.nf | 46 +++++++++ modules/dastool/scaffolds2bin/meta.yml | 58 +++++++++++ tests/config/pytest_modules.yml | 8 ++ tests/modules/dastool/dastool/main.nf | 33 ++++++ tests/modules/dastool/dastool/test.yml | 29 ++++++ tests/modules/dastool/scaffolds2bin/main.nf | 25 +++++ tests/modules/dastool/scaffolds2bin/test.yml | 14 +++ 11 files changed, 542 insertions(+) create mode 100644 modules/dastool/dastool/functions.nf create mode 100644 modules/dastool/dastool/main.nf create mode 100644 modules/dastool/dastool/meta.yml create mode 100644 modules/dastool/scaffolds2bin/functions.nf create mode 100644 modules/dastool/scaffolds2bin/main.nf create mode 100644 modules/dastool/scaffolds2bin/meta.yml create mode 100644 tests/modules/dastool/dastool/main.nf create mode 100644 tests/modules/dastool/dastool/test.yml create mode 100644 tests/modules/dastool/scaffolds2bin/main.nf create mode 100644 tests/modules/dastool/scaffolds2bin/test.yml diff --git a/modules/dastool/dastool/functions.nf b/modules/dastool/dastool/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/dastool/dastool/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf new file mode 100644 index 00000000..dff32294 --- /dev/null +++ b/modules/dastool/dastool/main.nf @@ -0,0 +1,73 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DASTOOL_DASTOOL { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0" + } else { + container "quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0" + } + + input: + tuple val(meta), path(contigs), path(bins) + path(proteins) + path(db_directory) + val(search_engine) + + output: + tuple val(meta), path("*.log") , emit: log + tuple val(meta), path("*_summary.txt") , emit: summary + tuple val(meta), path("*_DASTool_scaffolds2bin.txt") , emit: scaffolds2bin + tuple val(meta), path("*.eval") , optional: true, emit: eval + tuple val(meta), path("*_DASTool_bins/*.fa") , optional: true, emit: bins + tuple val(meta), path("*.pdf") , optional: true, emit: pdfs + tuple val(meta), path("*.proteins.faa") , optional: true, emit: fasta_proteins + tuple val(meta), path("*.archaea.scg") , optional: true, emit: fasta_archaea_scg + tuple val(meta), path("*.bacteria.scg") , optional: true, emit: fasta_bacteria_scg + tuple val(meta), path("*.seqlength") , optional: true, emit: seqlength + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def bin_list = bins instanceof List ? bins.join(",") : "$bins" + def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond" + def db_dir = db_directory ? "--db_directory $db_directory" : "" + def clean_contigs = contigs.toString() - ".gz" + def decompress_contigs = contigs.toString() == clean_contigs ? "" : "gunzip -q -f $contigs" + def decompress_proteins = proteins ? "gunzip -f $proteins" : "" + def clean_proteins = proteins ? proteins.toString() - ".gz" : "" + def proteins_pred = proteins ? "--proteins $clean_proteins" : "" + + if (! search_engine) { + log.info('[DAS_Tool] Default search engine (USEARCH) is proprietary software and not available in bioconda. 
Using DIAMOND as alternative.') + } + + """ + $decompress_proteins + $decompress_contigs + + DAS_Tool \\ + $options.args \\ + $proteins_pred \\ + $db_dir \\ + $engine \\ + -t $task.cpus \\ + --bins $bin_list \\ + -c $clean_contigs \\ + -o $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + END_VERSIONS + """ +} diff --git a/modules/dastool/dastool/meta.yml b/modules/dastool/dastool/meta.yml new file mode 100644 index 00000000..12d31e9f --- /dev/null +++ b/modules/dastool/dastool/meta.yml @@ -0,0 +1,100 @@ +name: dastool_dastool +description: DAS Tool binning step. +keywords: + - binning + - das tool + - table + - de novo + - bins + - contigs + - assembly + - das_tool +tools: + - dastool: + description: | + DAS Tool is an automated method that integrates the results + of a flexible number of binning algorithms to calculate an optimized, non-redundant + set of bins from a single assembly. + + homepage: https://github.com/cmks/DAS_Tool + documentation: https://github.com/cmks/DAS_Tool + tool_dev_url: https://github.com/cmks/DAS_Tool + doi: "10.1038/s41564-018-0171-1" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - contigs: + type: file + description: fasta file + pattern: "*.{fa.gz,fas.gz,fasta.gz}" + - bins: + type: file + description: "Scaffolds2bin tabular file generated with dastool/scaffolds2bin" + pattern: "*.scaffolds2bin.tsv" + - proteins: + type: file + description: Predicted proteins in prodigal fasta format (>scaffoldID_geneNo) + pattern: "*.{fa.gz,fas.gz,fasta.gz}" + - db_directory: + type: file + description: (optional) Directory of single copy gene database. + - search_engine: + type: val + description: Engine used for single copy gene identification. USEARCH is not supported due to it being proprietary [blast/diamond] + + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - log: + type: file + description: Log file of the run + pattern: "*.log" + - summary: + type: file + description: Summary of output bins including quality and completeness estimates + pattern: "*summary.txt" + - scaffolds2bin: + type: file + description: Scaffolds to bin file of output bins + pattern: "*.scaffolds2bin.txt" + - eval: + type: file + description: Quality and completeness estimates of input bin sets + pattern: "*.eval" + - pdfs: + type: file + description: Plots showing the amount of high quality bins and score distribution of bins per method + pattern: "*.pdf" + - fasta_proteins: + type: file + description: Output from prodigal if not already supplied + pattern: "*.proteins.faa" + - fasta_archaea_scg: + type: file + description: Results of archaeal single-copy-gene prediction + pattern: "*.archaea.scg" + - fasta_bacteria_scg: + type: file + description: Results of bacterial single-copy-gene prediction + pattern: "*.bacteria.scg" + - seqlength: + type: file + description: Summary of contig lengths + pattern: "*.seqlength" + +authors: + - "@maxibor" + - "@jfy133" diff --git a/modules/dastool/scaffolds2bin/functions.nf b/modules/dastool/scaffolds2bin/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/dastool/scaffolds2bin/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/dastool/scaffolds2bin/main.nf b/modules/dastool/scaffolds2bin/main.nf new file mode 100644 index 00000000..b51a6e6e --- /dev/null +++ b/modules/dastool/scaffolds2bin/main.nf @@ -0,0 +1,46 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DASTOOL_SCAFFOLDS2BIN { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0" + } else { + container "quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + val(extension) + + output: + tuple val(meta), path("*.tsv"), emit: scaffolds2bin + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def file_extension = extension ? extension : "fasta" + + """ + gunzip -f *.${file_extension}.gz + + Fasta_to_Scaffolds2Bin.sh \\ + $options.args \\ + -i . \\ + -e $file_extension \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + END_VERSIONS + """ +} diff --git a/modules/dastool/scaffolds2bin/meta.yml b/modules/dastool/scaffolds2bin/meta.yml new file mode 100644 index 00000000..f41a3cf2 --- /dev/null +++ b/modules/dastool/scaffolds2bin/meta.yml @@ -0,0 +1,58 @@ +name: dastool_scaffolds2bin +description: Helper script to convert a set of bins in fasta format to tabular scaffolds2bin format +keywords: + - binning + - das tool + - table + - de novo + - bins + - contigs + - assembly + - das_tool +tools: + - dastool: + description: | + DAS Tool is an automated method that integrates the results + of a flexible number of binning algorithms to calculate an optimized, non-redundant + set of bins from a single assembly. + + homepage: https://github.com/cmks/DAS_Tool + documentation: https://github.com/cmks/DAS_Tool + tool_dev_url: https://github.com/cmks/DAS_Tool + doi: "10.1038/s41564-018-0171-1" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Fasta of list of fasta files recommended to be gathered via with .collect() of bins + pattern: "*.{fa,fas,fasta}" + - binner: + type: val + description: Name of the binning software (optional) + - extension: + type: val + description: Fasta file extension (fa | fas | fasta | ...) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - scaffolds2bin: + type: file + description: tabular scaffolds2bin file for DAS tool input + pattern: "*.scaffolds2bin.tsv" + +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index daa48bc2..55223f55 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -338,6 +338,14 @@ damageprofiler: - modules/damageprofiler/** - tests/modules/damageprofiler/** +dastool/dastool: + - modules/dastool/dastool/** + - tests/modules/dastool/dastool/** + +dastool/scaffolds2bin: + - modules/dastool/scaffolds2bin/** + - tests/modules/dastool/scaffolds2bin/** + dedup: - modules/dedup/** - tests/modules/dedup/** diff --git a/tests/modules/dastool/dastool/main.nf b/tests/modules/dastool/dastool/main.nf new file mode 100644 index 00000000..31c32ef4 --- /dev/null +++ b/tests/modules/dastool/dastool/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' addParams( options: [:] ) +include { DASTOOL_DASTOOL } from '../../../../modules/dastool/dastool/main.nf' addParams( options: [args: '--score_threshold 0 --debug'] ) + +workflow test_dastool_dastool { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") + + Channel.of([ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true)]) + .join(DASTOOL_SCAFFOLDS2BIN.out.scaffolds2bin) + .set {input_dastool} + + + DASTOOL_DASTOOL ( input_dastool, [], [], [] ) +} diff --git a/tests/modules/dastool/dastool/test.yml b/tests/modules/dastool/dastool/test.yml new file mode 100644 index 00000000..eff02f96 --- /dev/null +++ b/tests/modules/dastool/dastool/test.yml @@ -0,0 +1,29 @@ +- name: dastool dastool test_dastool_dastool + command: nextflow run tests/modules/dastool/dastool -entry test_dastool_dastool -c tests/config/nextflow.config + tags: + - dastool + - dastool/dastool + files: + - path: output/dastool/test.seqlength + md5sum: b815a5811008c36808a59b1d0dcfab24 + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/test_DASTool.log + contains: + - 'DAS Tool run on' + - path: output/dastool/test_DASTool_scaffolds2bin.txt + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/test_DASTool_summary.txt + md5sum: 
a3efa8717b30dfada78dc5ae9a3dc396 + - path: output/dastool/test_proteins.faa.archaea.scg + md5sum: e79d82eecee25821d1658ea4f082601d + - path: output/dastool/test_proteins.faa.bacteria.scg + md5sum: 8132cfb17cf398d41c036ead55c96ffe + - path: output/dastool/test_test.tsv.eval + md5sum: a3efa8717b30dfada78dc5ae9a3dc396 + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 2b297bf557cc3831b800348859331268 + - path: output/metabat2/test.tsv.gz + md5sum: 619338fa5019e361d5545ce385a6961f + - path: output/metabat2/test.txt.gz + md5sum: 745a0446af6ef68b930975e9ce5a95d6 diff --git a/tests/modules/dastool/scaffolds2bin/main.nf b/tests/modules/dastool/scaffolds2bin/main.nf new file mode 100644 index 00000000..63ffe82a --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' addParams( options: [:] ) + +workflow test_dastool_scaffolds2bin { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") +} \ No newline at end of file diff --git a/tests/modules/dastool/scaffolds2bin/test.yml b/tests/modules/dastool/scaffolds2bin/test.yml new file mode 100644 index 00000000..c6e25bff --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/test.yml @@ -0,0 +1,14 @@ +- name: dastool scaffolds2bin test_dastool_scaffolds2bin + command: nextflow run tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin -c tests/config/nextflow.config + tags: + - dastool + - dastool/scaffolds2bin + files: + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 2b297bf557cc3831b800348859331268 + - path: output/metabat2/test.tsv.gz + md5sum: 619338fa5019e361d5545ce385a6961f + - path: output/metabat2/test.txt.gz + md5sum: 745a0446af6ef68b930975e9ce5a95d6 From 5ebe62612cc05b1b39359e4a2a2eda79c65fdd73 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Sun, 21 Nov 2021 05:14:02 -0700 Subject: [PATCH 270/314] add ectyper module (#948) * add ectyper module * fix-lint * try zcat * Update main.nf * fix lint * Update main.nf * Apply suggestions from code review Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> * Update main.nf * pass lint * Update main.nf * fix lint Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> Co-authored-by: Harshil Patel --- modules/ectyper/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/ectyper/main.nf | 51 +++++++++++++++++++++ modules/ectyper/meta.yml | 51 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ectyper/main.nf | 13 ++++++ tests/modules/ectyper/test.yml | 11 +++++ 6 files changed, 208 insertions(+) create mode 100644 modules/ectyper/functions.nf create mode 100644 modules/ectyper/main.nf create mode 100644 modules/ectyper/meta.yml create mode 100644 tests/modules/ectyper/main.nf create mode 100644 tests/modules/ectyper/test.yml diff --git a/modules/ectyper/functions.nf b/modules/ectyper/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ectyper/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ectyper/main.nf b/modules/ectyper/main.nf new file mode 100644 index 00000000..b5d8202d --- /dev/null +++ b/modules/ectyper/main.nf @@ -0,0 +1,51 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ECTYPER { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::ectyper=1.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ectyper:1.0.0--pyhdfd78af_1" + } else { + container "quay.io/biocontainers/ectyper:1.0.0--pyhdfd78af_1" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.log"), emit: log + tuple val(meta), path("*.tsv"), emit: tsv + tuple val(meta), path("*.txt"), emit: txt + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def is_compressed = fasta.getName().endsWith(".gz") ? true : false + def fasta_name = fasta.getName().replace(".gz", "") + """ + if [ "$is_compressed" == "true" ]; then + gzip -c -d $fasta > $fasta_name + fi + + ectyper \\ + $options.args \\ + --cores $task.cpus \\ + --output ./ \\ + --input $fasta_name + mv output.tsv ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(ectyper --version 2>&1) | sed 's/.*ectyper //; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/ectyper/meta.yml b/modules/ectyper/meta.yml new file mode 100644 index 00000000..a6beca29 --- /dev/null +++ b/modules/ectyper/meta.yml @@ -0,0 +1,51 @@ +name: ectyper +description: In silico prediction of E. coli serotype +keywords: + - escherichia coli + - fasta + - serotype +tools: + - ectyper: + description: ECtyper is a python program for serotyping E. coli genomes + homepage: https://github.com/phac-nml/ecoli_serotyping + documentation: https://github.com/phac-nml/ecoli_serotyping + tool_dev_url: https://github.com/phac-nml/ecoli_serotyping + doi: "" + licence: ['Apache 2'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA formatted assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - log: + type: file + description: ectyper log output + pattern: "*.log" + - tsv: + type: file + description: ectyper serotyping results in TSV format + pattern: "*.tsv" + - txt: + type: file + description: Allele report generated from BLAST results + pattern: "*.tst" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 55223f55..5a879cdc 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -406,6 +406,10 @@ dshbio/splitgff3: - modules/dshbio/splitgff3/** - tests/modules/dshbio/splitgff3/** +ectyper: + - modules/ectyper/** + - tests/modules/ectyper/** + emmtyper: - modules/emmtyper/** - tests/modules/emmtyper/** diff --git a/tests/modules/ectyper/main.nf b/tests/modules/ectyper/main.nf new file mode 100644 index 00000000..123df68d --- /dev/null +++ b/tests/modules/ectyper/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ECTYPER } from '../../../modules/ectyper/main.nf' addParams( options: [:] ) + +workflow test_ectyper { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + ECTYPER ( input ) +} diff --git a/tests/modules/ectyper/test.yml b/tests/modules/ectyper/test.yml new file mode 100644 index 00000000..c6f4c668 --- /dev/null +++ b/tests/modules/ectyper/test.yml @@ -0,0 +1,11 @@ +- name: ectyper test_ectyper + command: nextflow run tests/modules/ectyper -entry test_ectyper -c tests/config/nextflow.config + tags: + - ectyper + files: + - path: output/ectyper/blast_output_alleles.txt + md5sum: 27f3f5e84f7da451b2948d61589cdb06 + - path: output/ectyper/ectyper.log + contains: ['Serotype', 'RefSeq', 'O-type', 'finished'] + - path: output/ectyper/test.tsv + md5sum: ba923d7c7ee7d1047466aafc9a9df208 From 29c669766d472ff67337d6fb8a149735cabaac53 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Sun, 21 Nov 2021 05:17:25 -0700 Subject: [PATCH 271/314] add bakta module (#1085) * add bakta module * Update main.nf * Update main.nf Co-authored-by: Harshil Patel --- modules/bakta/functions.nf | 78 ++++++++++++++++ modules/bakta/main.nf | 77 ++++++++++++++++ modules/bakta/meta.yml | 85 ++++++++++++++++++ ...t_versions_yml.cpython-39-pytest-6.2.5.pyc | Bin 3558 -> 0 bytes tests/config/pytest_modules.yml | 4 + tests/modules/bakta/main.nf | 13 +++ tests/modules/bakta/test.yml | 25 ++++++ 7 files changed, 282 insertions(+) create mode 100644 modules/bakta/functions.nf create mode 100644 modules/bakta/main.nf create mode 100644 modules/bakta/meta.yml delete mode 100644 tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc create mode 100644 tests/modules/bakta/main.nf create mode 100644 tests/modules/bakta/test.yml diff --git a/modules/bakta/functions.nf b/modules/bakta/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/bakta/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/bakta/main.nf b/modules/bakta/main.nf new file mode 100644 index 00000000..2939f575 --- /dev/null +++ b/modules/bakta/main.nf @@ -0,0 +1,77 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BAKTA { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bakta=1.2.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bakta:1.2.2--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/bakta:1.2.2--pyhdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + path db + path proteins + path prodigal_tf + + output: + tuple val(meta), path("${prefix}.embl") , emit: embl + tuple val(meta), path("${prefix}.faa") , emit: faa + tuple val(meta), path("${prefix}.ffn") , emit: ffn + tuple val(meta), path("${prefix}.fna") , emit: fna + tuple val(meta), path("${prefix}.gbff") , emit: gbff + tuple val(meta), path("${prefix}.gff3") , emit: gff + tuple val(meta), path("${prefix}.hypotheticals.tsv"), emit: hypotheticals_tsv + tuple val(meta), path("${prefix}.hypotheticals.faa"), emit: hypotheticals_faa + tuple val(meta), path("${prefix}.tsv") , emit: tsv + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" + def prodigal_opt = prodigal_tf ? "--prodigal-tf ${prodigal_tf[0]}" : "" + """ + bakta \\ + $options.args \\ + --threads $task.cpus \\ + --prefix ${prefix} \\ + --db $db \\ + $proteins_opt \\ + $prodigal_opt \\ + $fasta + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + END_VERSIONS + """ + + stub: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + touch ${prefix}.embl + touch ${prefix}.faa + touch ${prefix}.ffn + touch ${prefix}.fna + touch ${prefix}.gbff + touch ${prefix}.gff3 + touch ${prefix}.hypotheticals.tsv + touch ${prefix}.hypotheticals.faa + touch ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + END_VERSIONS + """ +} diff --git a/modules/bakta/meta.yml b/modules/bakta/meta.yml new file mode 100644 index 00000000..29e6edbe --- /dev/null +++ b/modules/bakta/meta.yml @@ -0,0 +1,85 @@ +name: bakta +description: Rapid annotation of bacterial genomes & plasmids. +keywords: + - annotation + - fasta + - prokaryote +tools: + - bakta: + description: Rapid & standardized annotation of bacterial genomes & plasmids. 
+ homepage: https://github.com/oschwengers/bakta + documentation: https://github.com/oschwengers/bakta + tool_dev_url: https://github.com/oschwengers/bakta + doi: "10.1099/mgen.0.000685" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: | + FASTA file to be annotated. Has to contain at least a non-empty string dummy value. + - db: + type: file + description: | + Path to the Bakta database + - proteins: + type: file + description: FASTA file of trusted proteins to first annotate from (optional) + - prodigal_tf: + type: file + description: Training file to use for Prodigal (optional) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: annotations as simple human readable tab separated values + pattern: "*.tsv" + - gff: + type: file + description: annotations & sequences in GFF3 format + pattern: "*.gff3" + - gbff: + type: file + description: annotations & sequences in (multi) GenBank format + pattern: "*.gbff" + - embl: + type: file + description: annotations & sequences in (multi) EMBL format + pattern: "*.embl" + - fna: + type: file + description: replicon/contig DNA sequences as FASTA + pattern: "*.fna" + - faa: + type: file + description: CDS/sORF amino acid sequences as FASTA + pattern: "*.faa" + - ffn: + type: file + description: feature nucleotide sequences as FASTA + pattern: "*.ffn" + - hypotheticals_tsv: + type: file + description: further information on hypothetical protein CDS as simple human readable tab separated values + pattern: "*.hypotheticals.tsv" + - hypotheticals_faa: + type: file + description: hypothetical protein CDS amino acid sequences as FASTA + pattern: "*.hypotheticals.faa" + +authors: + - "@rpetit3" diff --git a/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc b/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc deleted file mode 100644 index 33acb8369a1bc62b5e66e1ed80e2247dd0e2759f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3558
z+RyaIn^c3m`Q~OyT#n~$R*t4}GL^kqy_X+fOp&u~Oohc~tGK-7gpqWH(G-&3BAI4# zirKWtjs1-$6G*fS)1@5UcD82!*vc|)k?_ID7tX*7vz$Gi2GXAd3{tTtAhX<7Nwr9S z(Rr}QYCg>S(f z0T~~yAf(5ovk%!ZyHuhmyN$jtv|roUyyrgL_~ibFA8+38kZ()K?}r^SJN|WNX3Dpf5I(`sb z2IGu9iT?QM#wQ#1K7N1Jtlss6V4w|de*?CMoo;-CaROlVJ9hx+`T|+?SBs8wM^UeL z#08uI9Xi#-xs%P_L>U1KQ#%LMF;VC`KLJAdNoa6*bJka0j3$=@2G=d3%o)^$J)R1w zE>;ipV^6$+gey4;nFDtuKe(+7KjG1U39umg%ENnoAe~3-;1#uQWoq4;y$xGJcqq{j z44!lX2r{X}E;e`A*hdR>XWf@f1D^T+`ll@s4dp)E;YXKZ$KbD_&PR&RN}qp5-|k+V zwI62prV~!b+l<4p;?C@J2CNWV-eSG$%=k-au7K?`IM`eG<7z$!o&UM03mzYgs)=&d z^^UltdjBXLU1@A7C!o;5_1K`04o5_5YN<)4!|Mzp$$0FGiF6 z8dmuR&~{p>ojaWOU?N>}vuNpNn81Bq&h|MtCaoMzVR5bI_$A~i Date: Sun, 21 Nov 2021 13:41:03 +0100 Subject: [PATCH 272/314] update: `gunzip` to include meta input tuple (#1082) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Add bamUtil trimBam * Update modules/bamutil/trimbam/main.nf Co-authored-by: Harshil Patel * Update modules/bamutil/trimbam/main.nf * Changes after code-review * YAML lint * update: add (optional) meta to input tuple * YAML linting * Update main.nf Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry Co-authored-by: Sébastien Guizard Co-authored-by: Robert A. Petit III --- modules/gunzip/main.nf | 10 +++++----- modules/gunzip/meta.yml | 6 ++++++ tests/modules/gunzip/main.nf | 4 +++- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index aec4569f..564fa99d 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -9,7 +9,7 @@ process GUNZIP { label 'process_low' publishDir "${params.outdir}", mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"conda-forge::sed=4.7" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { @@ -19,14 +19,14 @@ process GUNZIP { } input: - path archive + tuple val(meta), path(archive) output: - path "$gunzip", emit: gunzip - path "versions.yml" , emit: versions + tuple val(meta), path("$gunzip"), emit: gunzip + path "versions.yml" , emit: versions script: - gunzip = archive.toString() - '.gz' + gunzip = archive.toString() - '.gz' """ gunzip \\ -f \\ diff --git a/modules/gunzip/meta.yml b/modules/gunzip/meta.yml index 3482f0d2..ea1f1546 100644 --- a/modules/gunzip/meta.yml +++ b/modules/gunzip/meta.yml @@ -10,6 +10,11 @@ tools: documentation: https://www.gnu.org/software/gzip/manual/gzip.html licence: ['GPL-3.0-or-later'] input: + - meta: + type: map + description: | + Optional groovy Map containing meta information + e.g. [ id:'test', single_end:false ] - archive: type: file description: File to be compressed/uncompressed @@ -26,3 +31,4 @@ output: authors: - "@joseespinosa" - "@drpatelh" + - "@jfy133" diff --git a/tests/modules/gunzip/main.nf b/tests/modules/gunzip/main.nf index 5a24e742..0c23a8cd 100644 --- a/tests/modules/gunzip/main.nf +++ b/tests/modules/gunzip/main.nf @@ -5,7 +5,9 @@ nextflow.enable.dsl = 2 include { GUNZIP } from '../../../modules/gunzip/main.nf' addParams( options: [:] ) workflow test_gunzip { - input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + input = [ [], + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] GUNZIP ( input ) } From c25c3fe4669096da8bd4bf3143cfc139d126035d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Sun, 21 Nov 2021 13:00:53 +0000 Subject: [PATCH 273/314] Update: `uLTRA` (#1081) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Update .gitignore * 📦 Add ultra module * 👌 IMPROVE: Update test input * 👌 IMPROVE: Update and clean code - Update to last versions.yml file - Update meta.yml - Correct typos * 👌 IMPROVE: Update output channels + Rename following subtool * 👌 IMPROVE: Remove old ultre files * 👌 IMPROVE: Update of pytest_modules.yml * 👌 IMPROVE: Update test.yml * 👌 IMPROVE: Keep md5sum as much as possible * 👌 IMPROVE: Remove old ultra files * 👌 IMPROVE: Update of pytest_modules.yml * 👌 IMPROVE: Update test.yml * 👌 IMPROVE: Keep md5sum as much as possible * 🐛 Fix: add unsaved modifications * 🐛 FIX: Remove one inconstant md5sum * 🐛 FIX: Grab software name using ${getSoftwareName(task.process)} * 🐛 FIX: Remove md5sums for pickle files (not constant). * Update modules/ultra/pipeline/main.nf Co-authored-by: Harshil Patel * Update modules/ultra/pipeline/main.nf Co-authored-by: Harshil Patel * 👌 IMPROVE: update output directory, update meta.yml * 👌 IMPROVE: Use modules to gunzip and sort gtf * 🐛 FIX: Set up channel correctly * 👌 IMPROVE: Remove pickles files and databases Those data might be useful in a debugging purpose. * Apply suggestions from code review * Update main.nf * 🐛 FIX: Update uLTRA to version 0.0.4.1 + remove $(pwd) * 👌 IMPROVE: Sort tags in test.yml * align order of input between main.nf and meta.yml. Add ksahlin as co-author (he did update his package to overcome the pwd-problem * Update main.nf * Update main.nf Co-authored-by: Harshil Patel Co-authored-by: Lasse Folkersen Co-authored-by: Robert A. 
Petit III --- modules/ultra/pipeline/main.nf | 12 ++++++------ modules/ultra/pipeline/meta.yml | 10 ++++++---- tests/modules/ultra/pipeline/main.nf | 17 +++++++---------- tests/modules/ultra/pipeline/test.yml | 2 +- 4 files changed, 20 insertions(+), 21 deletions(-) diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf index 5a5c2c3e..b61518e6 100644 --- a/modules/ultra/pipeline/main.nf +++ b/modules/ultra/pipeline/main.nf @@ -11,11 +11,11 @@ process ULTRA_PIPELINE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4" : null) + conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4.1" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4--pyh5e36f6f_1" + container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0" } else { - container "quay.io/biocontainers/ultra_bioinformatics:0.0.4--pyh5e36f6f_1" + container "quay.io/biocontainers/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0" } input: @@ -35,9 +35,9 @@ process ULTRA_PIPELINE { --t $task.cpus \\ --prefix $prefix \\ $options.args \\ - \$(pwd)/$genome \\ - \$(pwd)/$gtf \\ - \$(pwd)/$reads \\ + $genome \\ + $gtf \\ + $reads \\ ./ cat <<-END_VERSIONS > versions.yml diff --git a/modules/ultra/pipeline/meta.yml b/modules/ultra/pipeline/meta.yml index d0008cfc..fa8366e8 100644 --- a/modules/ultra/pipeline/meta.yml +++ b/modules/ultra/pipeline/meta.yml @@ -18,6 +18,10 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] + - reads: + type: file + description: A fasta or fastq file of reads to align + pattern: "*.{fasta,fastq}" - genome: type: file description: fasta file of reference genome @@ -26,10 +30,6 @@ input: type: file description: A annotation of use the genome pattern: "*.gtf" - - reads: - type: file - description: A fasta or fastq file of reads to align - pattern: "*.{fasta,fastq}" output: - meta: @@ -48,3 +48,5 @@ output: authors: - "@sguizard" + - "@lassefolkersen" + - "@ksahlin" diff --git a/tests/modules/ultra/pipeline/main.nf b/tests/modules/ultra/pipeline/main.nf index 881fe9a7..1404712b 100644 --- a/tests/modules/ultra/pipeline/main.nf +++ b/tests/modules/ultra/pipeline/main.nf @@ -8,18 +8,15 @@ include { GFFREAD } from '../../../../modules/gffread/main.nf' add workflow test_ultra_pipeline { - fastq = file(params.test_data['homo_sapiens']['pacbio']['hifi'] , checkIfExists: true) + input = [ + [ id:'test', single_end:false ], + file(params.test_data['homo_sapiens']['pacbio']['hifi'], checkIfExists: true) + ] + GUNZIP ( input ) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'] , checkIfExists: true) genome = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - - GUNZIP ( fastq ) GFFREAD ( gtf ) - GUNZIP - .out - .gunzip - .map { [ [ id:'test', single_end:false ], it ] } - .set { input } - - ULTRA_PIPELINE ( input, genome, GFFREAD.out.gtf ) + ULTRA_PIPELINE ( GUNZIP.out.gunzip, genome, GFFREAD.out.gtf ) } diff --git a/tests/modules/ultra/pipeline/test.yml b/tests/modules/ultra/pipeline/test.yml index fa378e58..7140193b 100644 --- a/tests/modules/ultra/pipeline/test.yml +++ b/tests/modules/ultra/pipeline/test.yml @@ -1,8 +1,8 @@ - name: ultra pipeline test_ultra_pipeline command: nextflow run tests/modules/ultra/pipeline -entry test_ultra_pipeline -c tests/config/nextflow.config tags: - - ultra/pipeline - ultra + - ultra/pipeline files: - path: output/gffread/genome_sorted.gtf md5sum: c0b034860c679a354cd093109ed90437 From 15fd90ffe8a9596406746e9112b861ed29f32952 Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Sun, 21 Nov 2021 21:43:58 +0200 Subject: [PATCH 274/314] Add phyloflash module (#786) * initial stubs [ci skip] * remove comments and add main command [ci skip] * design iteration [ci skip] * add new standard functions.nf [ci skip] * update the version string [ci skip] * accomodate the db stubs and single/double ends [ci skip] * add FIXME for missing info [ci skip] * Accomodate the results folder [ci skip] * Update main.nf * Apply suggestions from code review * Update main.nf * Apply suggestions from code review * Add version file to stubs [ci skip] * Tweak the output dir pattern [ci skip] * Update modules/phyloflash/main.nf * Update modules/phyloflash/main.nf * Update modules/phyloflash/main.nf Co-authored-by: Robert A. 
Petit III Co-authored-by: Harshil Patel Co-authored-by: FriederikeHanssen --- modules/phyloflash/functions.nf | 78 ++++++++++++++++++++++++++++ modules/phyloflash/main.nf | 85 +++++++++++++++++++++++++++++++ modules/phyloflash/meta.yml | 51 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/phyloflash/main.nf | 44 ++++++++++++++++ tests/modules/phyloflash/test.yml | 15 ++++++ 6 files changed, 277 insertions(+) create mode 100644 modules/phyloflash/functions.nf create mode 100644 modules/phyloflash/main.nf create mode 100644 modules/phyloflash/meta.yml create mode 100644 tests/modules/phyloflash/main.nf create mode 100644 tests/modules/phyloflash/test.yml diff --git a/modules/phyloflash/functions.nf b/modules/phyloflash/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/phyloflash/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/phyloflash/main.nf b/modules/phyloflash/main.nf new file mode 100644 index 00000000..894c16a2 --- /dev/null +++ b/modules/phyloflash/main.nf @@ -0,0 +1,85 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PHYLOFLASH { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::phyloflash=3.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/phyloflash:3.4--hdfd78af_1" + } else { + container "quay.io/biocontainers/phyloflash:3.4--hdfd78af_1" + } + + input: + tuple val(meta), path(reads) + path silva_db + path univec_db + + output: + tuple val(meta), path("${meta.id}*/*"), emit: results + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + if (meta.single_end) { + """ + phyloFlash.pl \\ + $options.args \\ + -read1 ${reads[0]} \\ + -lib $prefix \\ + -interleaved \\ + -dbhome . \\ + -CPUs $task.cpus + + mkdir $prefix + mv ${prefix}.* $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ + } else { + """ + phyloFlash.pl \\ + $options.args \\ + -read1 ${reads[0]} \\ + -read2 ${reads[1]} \\ + -lib $prefix \\ + -dbhome . \\ + -CPUs $task.cpus + + mkdir $prefix + mv ${prefix}.* $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ + } + + stub: + + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + mkdir ${prefix} + touch ${prefix}/${prefix}.SSU.collection.fasta + touch ${prefix}/${prefix}.phyloFlash + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ +} diff --git a/modules/phyloflash/meta.yml b/modules/phyloflash/meta.yml new file mode 100644 index 00000000..3ed7a9fa --- /dev/null +++ b/modules/phyloflash/meta.yml @@ -0,0 +1,51 @@ +name: phyloflash +description: phyloFlash is a pipeline to rapidly reconstruct the SSU rRNAs and explore phylogenetic composition of an illumina (meta)genomic dataset. +keywords: + - metagenomics + - illumina datasets + - phylogenetic composition +tools: + - phyloflash: + description: phyloFlash is a pipeline to rapidly reconstruct the SSU rRNAs and explore phylogenetic composition of an illumina (meta)genomic dataset. 
+ + homepage: https://hrgv.github.io/phyloFlash/ + documentation: https://hrgv.github.io/phyloFlash/usage.html + tool_dev_url: https://github.com/HRGV/phyloFlash + doi: "10.1128/mSystems.00920-20" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: Channel containing single or paired-end reads + pattern: "*.{fastq.gz,fq.gz}" + - silva_db: + type: folder + description: Folder containing the SILVA database + pattern: "ref" + - univec_db: + type: folder + description: Folder containing UniVec database + pattern: "UniVec" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - results: + type: folder + description: Folder containing the results of phyloFlash analysis + pattern: "${prefix}*" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 077fefc1..69d6a80e 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1011,6 +1011,10 @@ pbccs: - modules/pbccs/** - tests/modules/pbccs/** +phyloflash: + - modules/phyloflash/** + - tests/modules/phyloflash/** + picard/collecthsmetrics: - modules/picard/collecthsmetrics/** - tests/modules/picard/collecthsmetrics/** diff --git a/tests/modules/phyloflash/main.nf b/tests/modules/phyloflash/main.nf new file mode 100644 index 00000000..754d6747 --- /dev/null +++ b/tests/modules/phyloflash/main.nf @@ -0,0 +1,44 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PHYLOFLASH } from '../../../modules/phyloflash/main.nf' addParams( options: [:] ) + +process STUB_PHYLOFLASH_DATABASE { + output: + path "ref" , emit: silva_db + path "UniVec" , emit: univec_db + + stub: + """ + mkdir ref + touch UniVec + """ +} + +workflow test_phyloflash_single_end { + + STUB_PHYLOFLASH_DATABASE () + + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) +} + +workflow test_phyloflash_paired_end { + + STUB_PHYLOFLASH_DATABASE () + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) +} diff --git a/tests/modules/phyloflash/test.yml b/tests/modules/phyloflash/test.yml new file mode 100644 index 00000000..0cba41c5 --- /dev/null +++ b/tests/modules/phyloflash/test.yml @@ -0,0 +1,15 @@ +- name: phyloflash single-end + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_single_end -c tests/config/nextflow.config -stub-run + tags: + - phyloflash + files: + - path: output/phyloflash/test/test.SSU.collection.fasta + md5sum: d41d8cd98f00b204e9800998ecf8427e + +- name: phyloflash paired-end + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_paired_end -c tests/config/nextflow.config -stub-run + tags: + - phyloflash + files: + - path: output/phyloflash/test/test.SSU.collection.fasta + md5sum: d41d8cd98f00b204e9800998ecf8427e From
14554981528013409f13ce4fb5b638ce87cb9828 Mon Sep 17 00:00:00 2001 From: Florian De Temmerman <69114541+fbdtemme@users.noreply.github.com> Date: Sun, 21 Nov 2021 20:56:57 +0100 Subject: [PATCH 275/314] CNVkit: Make targets file optional when running in WGS mode (#1030) * Make targets.bed optional when running in wgs mode * added test for cram * Update test_data_config with new reference.cnn * Update main.nf to allow tumor-only running Still need a unit-test for this. Almost ready, but needs this file as input https://github.com/nf-core/test-datasets/blob/modules/data/generic/cnn/reference.cnn * re-writing previous changes, but now it wont crash the entire CI-setup * fixing overlooked merge conflict * last overlooked merge-conflict * move all files to batch subfolder * adding an optional input for a reference file (needed when running germline and tumoronly) * minor typo * update meta.yml * aligning code, renaming cnvkit to cnvkit_batch, renaming tumorbam to tumor, normalbam to normal * Update pytest_modules.yml Co-authored-by: EC2 Default User Co-authored-by: Lasse Folkersen Co-authored-by: Robert A. Petit III Co-authored-by: Harshil Patel --- modules/cnvkit/{ => batch}/functions.nf | 0 modules/cnvkit/{ => batch}/main.nf | 35 +++++--- modules/cnvkit/{ => batch}/meta.yml | 17 ++-- tests/config/pytest_modules.yml | 6 +- tests/config/test_data.config | 3 + tests/modules/cnvkit/batch/main.nf | 64 +++++++++++++++ tests/modules/cnvkit/batch/test.yml | 101 ++++++++++++++++++++++++ tests/modules/cnvkit/main.nf | 19 ----- tests/modules/cnvkit/test.yml | 27 ------- 9 files changed, 207 insertions(+), 65 deletions(-) rename modules/cnvkit/{ => batch}/functions.nf (100%) rename modules/cnvkit/{ => batch}/main.nf (59%) mode change 100755 => 100644 rename modules/cnvkit/{ => batch}/meta.yml (89%) mode change 100755 => 100644 create mode 100755 tests/modules/cnvkit/batch/main.nf create mode 100755 tests/modules/cnvkit/batch/test.yml delete mode 100755 tests/modules/cnvkit/main.nf delete mode 100755 tests/modules/cnvkit/test.yml diff --git a/modules/cnvkit/functions.nf b/modules/cnvkit/batch/functions.nf similarity index 100% rename from modules/cnvkit/functions.nf rename to modules/cnvkit/batch/functions.nf diff --git a/modules/cnvkit/main.nf b/modules/cnvkit/batch/main.nf old mode 100755 new mode 100644 similarity index 59% rename from modules/cnvkit/main.nf rename to modules/cnvkit/batch/main.nf index 27c8bb0d..06ecaa40 --- a/modules/cnvkit/main.nf +++ b/modules/cnvkit/batch/main.nf @@ -4,7 +4,7 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -process CNVKIT { +process CNVKIT_BATCH { tag "$meta.id" label 'process_low' publishDir "${params.outdir}", @@ -19,25 +19,40 @@ process CNVKIT { } input: - tuple val(meta), path(tumourbam), path(normalbam) + tuple val(meta), path(tumor), path(normal) path fasta - path targetfile + path targets + path reference output: tuple val(meta), path("*.bed"), emit: bed - tuple val(meta), path("*.cnn"), emit: cnn - tuple val(meta), path("*.cnr"), emit: cnr - tuple val(meta), path("*.cns"), emit: cns + tuple val(meta), path("*.cnn"), emit: cnn, optional: true + tuple val(meta), path("*.cnr"), emit: cnr, optional: true + tuple val(meta), path("*.cns"), emit: cns, optional: true path "versions.yml" , emit: versions script: + normal_args = normal ? "--normal $normal" : "" + fasta_args = fasta ? "--fasta $fasta" : "" + reference_args = reference ? 
"--reference $reference" : "" + + def target_args = "" + if (options.args.contains("--method wgs") || options.args.contains("-m wgs")) { + target_args = targets ? "--targets $targets" : "" + } + else { + target_args = "--targets $targets" + } + """ cnvkit.py \\ batch \\ - $tumourbam \\ - --normal $normalbam\\ - --fasta $fasta \\ - --targets $targetfile \\ + $tumor \\ + $normal_args \\ + $fasta_args \\ + $reference_args \\ + $target_args \\ + --processes ${task.cpus} \\ $options.args cat <<-END_VERSIONS > versions.yml diff --git a/modules/cnvkit/meta.yml b/modules/cnvkit/batch/meta.yml old mode 100755 new mode 100644 similarity index 89% rename from modules/cnvkit/meta.yml rename to modules/cnvkit/batch/meta.yml index 3e760d16..0d263041 --- a/modules/cnvkit/meta.yml +++ b/modules/cnvkit/batch/meta.yml @@ -1,4 +1,4 @@ -name: cnvkit +name: cnvkit_batch description: Copy number variant detection from high-throughput sequencing data keywords: - bam @@ -38,14 +38,14 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - tumourbam: + - tumour: type: file description: | - Input tumour sample bam file - - normalbam: + Input tumour sample bam file (or cram) + - normal: type: file description: | - Input normal sample bam file + Input normal sample bam file (or cram) - fasta: type: file description: | @@ -54,6 +54,10 @@ input: type: file description: | Input target bed file + - reference: + type: file + description: | + Input reference cnn-file (only for germline and tumor-only running) output: - meta: type: map @@ -85,4 +89,5 @@ authors: - "@KevinMenden" - "@MaxUlysse" - "@drpatelh" - + - "@fbdtemme" + - "@lassefolkersen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 69d6a80e..7b47bfea 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -294,9 +294,9 @@ cmseq/polymut: - modules/cmseq/polymut/** - tests/modules/cmseq/polymut/** -cnvkit: - - modules/cnvkit/** - - tests/modules/cnvkit/** +cnvkit/batch: + - modules/cnvkit/batch/** + - tests/modules/cnvkit/batch/** cooler/digest: - modules/cooler/digest/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 3351204d..c3bae012 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -263,6 +263,9 @@ params { 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } + 'cnn' { + reference = "${test_data_dir}/generic/cnn/reference.cnn" + } 'cooler'{ test_pairix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz" test_pairix_pair_gz_px2 = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz.px2" diff --git a/tests/modules/cnvkit/batch/main.nf b/tests/modules/cnvkit/batch/main.nf new file mode 100755 index 00000000..5d92afaa --- /dev/null +++ b/tests/modules/cnvkit/batch/main.nf @@ -0,0 +1,64 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CNVKIT_BATCH as CNVKIT_HYBRID } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn' ] ) +include { CNVKIT_BATCH as CNVKIT_WGS } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn --method wgs' ] ) +include { CNVKIT_BATCH as CNVKIT_TUMORONLY } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--method wgs' ] ) + + +workflow test_cnvkit_hybrid { + tumor = 
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + normal = file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) + + input = [ [ id:'test' ], // meta map + tumor, + normal + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) + + CNVKIT_HYBRID ( input, fasta, targets, [] ) +} + +workflow test_cnvkit_wgs { + tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + normal = file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + + input = [ [ id:'test'], // meta map + tumor, + normal + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + CNVKIT_WGS ( input, fasta, [], [] ) +} + + +workflow test_cnvkit_cram { + tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + normal = file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + + input = [ [ id:'test'], // meta map + tumor, + normal + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + CNVKIT_WGS ( input, fasta, [], [] ) +} + + + +workflow test_cnvkit_tumoronly { + tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + + input = [ [ id:'test'], // meta map + tumor, + [ ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + reference = file(params.test_data['generic']['cnn']['reference'], checkIfExists: true) + + CNVKIT_TUMORONLY ( input, [], [], reference ) +} diff --git a/tests/modules/cnvkit/batch/test.yml b/tests/modules/cnvkit/batch/test.yml new file mode 100755 index 00000000..96ea670c --- /dev/null +++ b/tests/modules/cnvkit/batch/test.yml @@ -0,0 +1,101 @@ +- name: cnvkit batch test_cnvkit_hybrid + command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_hybrid -c tests/config/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/baits.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/baits.target.bed + md5sum: 26d25ff2d6c45b6d92169b3559c6acdb + - path: output/cnvkit/reference.cnn + md5sum: ac99c1ad8b917b96ae15119146c91ab9 + - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test.paired_end.sorted.call.cns + md5sum: f2ca59b4d50b0c317adc526c1b99b622 + - path: output/cnvkit/test.paired_end.sorted.cnr + md5sum: 7e37d73ab604dbc3fe4ebb56aca9bdc3 + - path: output/cnvkit/test.paired_end.sorted.cns + md5sum: 060af1aa637ed51812af19bcce24fcfe + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: 3fe80b6013ffc3e9968345e810158215 + - path: output/cnvkit/test.single_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.single_end.sorted.targetcoverage.cnn + md5sum: aa8a018b1d4d1e688c9f9f6ae01bf4d7 + +- name: cnvkit batch test_cnvkit_wgs + command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_wgs -c tests/config/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - 
path: output/cnvkit/genome.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/genome.bed + md5sum: 87a15eb9c2ff20ccd5cd8735a28708f7 + - path: output/cnvkit/genome.target.bed + md5sum: a13353ae9c8405e701390c069255bbd2 + - path: output/cnvkit/reference.cnn + md5sum: 05c6211e0179885b8a83e44fd21d5f86 + - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: ff526714696aa49bdc1dc8d00d965266 + - path: output/cnvkit/test2.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test2.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test2.paired_end.sorted.call.cns + md5sum: f6de754c34f780e6befee5b3ff0893f8 + - path: output/cnvkit/test2.paired_end.sorted.cnr + md5sum: 80318d06c6b095945a0fb0e85e887cbc + - path: output/cnvkit/test2.paired_end.sorted.cns + md5sum: 76afa47afc4bd5de35aee8fdb54d3d3a + - path: output/cnvkit/test2.paired_end.sorted.targetcoverage.cnn + md5sum: 6ae6b3fce7299eedca6133d911c38fe1 + +- name: cnvkit batch test_cnvkit_cram + command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_cram -c tests/config/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/genome.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/genome.bed + md5sum: 87a15eb9c2ff20ccd5cd8735a28708f7 + - path: output/cnvkit/genome.target.bed + md5sum: a13353ae9c8405e701390c069255bbd2 + - path: output/cnvkit/reference.cnn + md5sum: 05c6211e0179885b8a83e44fd21d5f86 + - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: ff526714696aa49bdc1dc8d00d965266 + - path: output/cnvkit/test2.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test2.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test2.paired_end.sorted.call.cns + md5sum: f6de754c34f780e6befee5b3ff0893f8 + - path: output/cnvkit/test2.paired_end.sorted.cnr + md5sum: 80318d06c6b095945a0fb0e85e887cbc + - path: output/cnvkit/test2.paired_end.sorted.cns + md5sum: 76afa47afc4bd5de35aee8fdb54d3d3a + - path: output/cnvkit/test2.paired_end.sorted.targetcoverage.cnn + md5sum: 6ae6b3fce7299eedca6133d911c38fe1 + +- name: cnvkit batch test_cnvkit_tumoronly + command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_tumoronly -c tests/config/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/reference.antitarget-tmp.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/reference.target-tmp.bed + md5sum: 26d25ff2d6c45b6d92169b3559c6acdb diff --git a/tests/modules/cnvkit/main.nf b/tests/modules/cnvkit/main.nf deleted file mode 100755 index 6ee959ab..00000000 --- a/tests/modules/cnvkit/main.nf +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { CNVKIT } from '../../../modules/cnvkit/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn' ] ) - -workflow test_cnvkit { - tumourbam = file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - normalbam = file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) - - input 
= [ [ id:'test' ], // meta map - tumourbam, - normalbam - ] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) - - CNVKIT ( input, fasta, targets ) -} diff --git a/tests/modules/cnvkit/test.yml b/tests/modules/cnvkit/test.yml deleted file mode 100755 index 6e09d6f3..00000000 --- a/tests/modules/cnvkit/test.yml +++ /dev/null @@ -1,27 +0,0 @@ -- name: cnvkit - command: nextflow run ./tests/modules/cnvkit/ -entry test_cnvkit -c tests/config/nextflow.config - tags: - - cnvkit - files: - - path: output/cnvkit/baits.target.bed - md5sum: 26d25ff2d6c45b6d92169b3559c6acdb - - path: output/cnvkit/baits.antitarget.bed - md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/cnvkit/reference.cnn - md5sum: ac99c1ad8b917b96ae15119146c91ab9 - - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn - md5sum: 3fe80b6013ffc3e9968345e810158215 - - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn - md5sum: 203caf8cef6935bb50b4138097955cb8 - - path: output/cnvkit/test.single_end.sorted.targetcoverage.cnn - md5sum: aa8a018b1d4d1e688c9f9f6ae01bf4d7 - - path: output/cnvkit/test.single_end.sorted.antitargetcoverage.cnn - md5sum: 203caf8cef6935bb50b4138097955cb8 - - path: output/cnvkit/test.paired_end.sorted.cnr - md5sum: 7e37d73ab604dbc3fe4ebb56aca9bdc3 - - path: output/cnvkit/test.paired_end.sorted.cns - md5sum: 060af1aa637ed51812af19bcce24fcfe - - path: output/cnvkit/test.paired_end.sorted.bintest.cns - md5sum: 6544d979475def8a9f69ba42a985668d - - path: output/cnvkit/test.paired_end.sorted.call.cns - md5sum: f2ca59b4d50b0c317adc526c1b99b622 From 45985ff6f01c6e2e229b665ba45b159bd925513d Mon Sep 17 00:00:00 2001 From: Annick Renevey <47788523+rannick@users.noreply.github.com> Date: Sun, 21 Nov 2021 21:14:09 +0100 Subject: [PATCH 276/314] Peddy nf core2 2dev0 (#1048) * Updated module to fit nf-core2.2dev0 templates * Updated module to fit nf-core2.2dev0 templates * Linked test files to nf-core/test-dataset branch:raredisease raw files * Change order of input: vcf and tbi with meta, ped without * Change order of input: vcf and tbi with meta, ped without/adapt test * Change order of input: vcf and tbi with meta, ped without/adapt test, bugfix * Indent and rename files * Update modules/peddy/main.nf Removed newline Co-authored-by: FriederikeHanssen * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update modules/peddy/meta.yml Co-authored-by: Harshil Patel * Update modules/peddy/meta.yml Co-authored-by: Harshil Patel * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update pytest_modules.yml * Update main.nf * Apply suggestions from code review Co-authored-by: FriederikeHanssen Co-authored-by: Harshil Patel --- modules/peddy/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/peddy/main.nf | 47 ++++++++++++++++++++ modules/peddy/meta.yml | 64 +++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 4 ++ tests/modules/peddy/main.nf | 17 +++++++ tests/modules/peddy/test.yml | 17 +++++++ 7 files changed, 231 insertions(+) create mode 100644 modules/peddy/functions.nf create mode 100644 modules/peddy/main.nf create mode 100644 modules/peddy/meta.yml create mode 100644 tests/modules/peddy/main.nf create mode 
100644 tests/modules/peddy/test.yml diff --git a/modules/peddy/functions.nf b/modules/peddy/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/peddy/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/peddy/main.nf b/modules/peddy/main.nf new file mode 100644 index 00000000..4331ed9d --- /dev/null +++ b/modules/peddy/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PEDDY { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::peddy=0.4.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/peddy:0.4.8--pyh5e36f6f_0" + } else { + container "quay.io/biocontainers/peddy:0.4.8--pyh5e36f6f_0" + } + + input: + tuple val(meta), path(vcf), path(vcf_tbi) + path ped + + output: + tuple val(meta), path("*.html") , emit: html + tuple val(meta), path("*.csv") , emit: csv + tuple val(meta), path("*.peddy.ped"), emit: ped + tuple val(meta), path("*.png") , emit: png + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + peddy \\ + $options.args \\ + --plot \\ + -p $task.cpus \\ + $vcf \\ + $ped + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( peddy --version 2>&1 | sed 's/peddy, version //' ) + END_VERSIONS + """ +} diff --git a/modules/peddy/meta.yml b/modules/peddy/meta.yml new file mode 100644 index 00000000..7c3fcf45 --- /dev/null +++ b/modules/peddy/meta.yml @@ -0,0 +1,64 @@ +name: peddy +description: Manipulation, validation and exploration of pedigrees +keywords: + - pedigrees + - ped + - family + +tools: + - peddy: + description: genotype, ped correspondence check, ancestry check, sex check. directly, quickly on VCF + homepage: https://github.com/brentp/peddy + documentation: https://peddy.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/brentp/peddy + doi: "https://doi.org/10.1016/j.ajhg.2017.01.017" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: VCF file + pattern: "*.{vcf.gz}" + - ped: + type: file + description: PED/FAM file + pattern: "*.{ped,fam}" + - vcf_tbi: + type: file + description: TBI file + pattern: "*.{vcf.gz.tbi}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - ped: + type: file + description: PED/FAM file + pattern: "*.peddy.{ped}" + - html: + type: file + description: HTML file + pattern: "*.{html}" + - csv: + type: file + description: CSV file + pattern: "*.{csv}" + - png: + type: file + description: PNG file + pattern: "*.{png}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@rannick" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7b47bfea..d68d64d0 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1019,6 +1019,10 @@ picard/collecthsmetrics: - modules/picard/collecthsmetrics/** - tests/modules/picard/collecthsmetrics/** +peddy: + - modules/peddy/** + - tests/modules/peddy/** + picard/collectmultiplemetrics: - modules/picard/collectmultiplemetrics/** - tests/modules/picard/collectmultiplemetrics/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index c3bae012..0c7ce2fc 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -128,6 +128,10 @@ params { index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" + justhusky_ped = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky.ped" + justhusky_minimal_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz" + justhusky_minimal_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz.tbi" + } 'illumina' { test_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam" diff --git a/tests/modules/peddy/main.nf b/tests/modules/peddy/main.nf new file mode 100644 index 00000000..d6331752 --- /dev/null +++ b/tests/modules/peddy/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PEDDY } from '../../../modules/peddy/main.nf' addParams( options: [:] ) + +workflow test_peddy { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['homo_sapiens']['genome']['justhusky_minimal_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['justhusky_minimal_vcf_gz_tbi'], checkIfExists: true) + ] + ped = file(params.test_data['homo_sapiens']['genome']['justhusky_ped'], checkIfExists: true) + + PEDDY ( input , ped ) +} diff --git a/tests/modules/peddy/test.yml b/tests/modules/peddy/test.yml new file mode 100644 index 00000000..77bf00f6 --- /dev/null +++ b/tests/modules/peddy/test.yml @@ -0,0 +1,17 @@ +- name: peddy test_peddy + command: nextflow run tests/modules/peddy -entry test_peddy -c tests/config/nextflow.config + tags: + - peddy + files: + - path: output/peddy/justhusky_minimal.het_check.csv + md5sum: f4006d47355f2a760e40215b403926c3 + - path: output/peddy/justhusky_minimal.html + md5sum: 4f189cdbe8f03fe5c32d343c183506a5 + - path: output/peddy/justhusky_minimal.ped_check.csv + md5sum: d79a98558e280afe794d1374d2b985d4 + - path: output/peddy/justhusky_minimal.ped_check.rel-difference.csv + md5sum: 9de7e287cb30c742db2ff3622b0e63b1 + - path: output/peddy/justhusky_minimal.sex_check.csv + md5sum: 60848489bc697490da6a53b5170baf3b + - path: output/peddy/justhusky_minimal.vs.html + md5sum: 20f5f3a97fa781057c876ac79e044010 From 20d8250d9f39ddb05dfb437603aaf99b5c0b2b41 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 26 Nov 2021 07:58:40 +0000 Subject: [PATCH 
277/314] Update all modules to new NF DSL2 syntax (#1099)

* Add comment line for consistency
* Remove all functions.nf
* Remove include functions.nf and publishDir options
* Replace options.args3 with task.ext.args3 - 3 modules
* Replace options.args3 with task.ext.args3 - 17 modules
* Replace {task.cpus} with task.cpus
* Replace options.args with task.ext.args
* Add def args = task.ext.args line to all modules in script section
* Replace options.args with args and args_list
* Initialise args2 and args3 properly
* Replace container syntax
* Revert container changes for cellranger/mkref
* Replace getProcessName in all modules
* Replace getSoftwareName in all modules
* Unify modules using VERSION variable
* Replace options.suffix with task.ext.suffix
* Remove NF version restriction for CI
* Bump NF version in README
* Replace task.process.tokenize logic with task.process
* Minor tweaks to unify syntax in tests main.nf
* Add a separate nextflow.config for each module
* Transfer remaining module options to nextflow.config
* Remove addParams from tests main.nf
* Remove TODO statements
* Use -c to import module specific config
* Bump NF version to 21.10.3
* Fix tests for artic/minion
* Fix broken publishDir syntax
* Standardise and fix obvious failing module tests
* Rename kronatools to krona
* Comment out tags in subworkflow test.yml
* Fix failing module tests
* Add consistent indentation to nextflow.config
* Comment out subworkflow definitions
* Fix kallistobustools/ref
* Fix rmarkdownnotebook
* Fix jupyternotebook
* Quote task.process
* Add plink2/vcf to pytest_modules.yml
* Remove NF_CORE_MODULES_TEST from pytest CI
* Fix more tests
* Move bacteroides_fragilis to prokaryotes folder
* Fix cooler merge tests
* Fix kallistobustools/count tests
* Fix kallistobustools/ref tests
* Update test_10x_1_fastq_gz file for kallistobustools/count tests
* Fix bcftools/query tests
* Fix delly/call tests
* Fix cooler/zoomify tests
* Fix csvtk/split tests
* Fix gatk4/intervallisttools tests
* Fix gatk4/variantfiltration
* Fix pydamage/filter tests
* Fix test data for unicycler
* Fix gstama/collapse module
* Fix leehom tests
* Fix metaphlan3 tests
* Fix pairtools/select tests
* Update nextflow.config
* Update nextflow.config
* feat: update syntax
* Fix arriba tests
* Fix more failing tests
* Update test syntax
* Remove comments from tests nextflow.config
* Apply suggestions from code review
* Fix kallistobustools/count module
* Update dumpsoftwareversions module
* Update custom/dumpsoftwareversions
* Add args2 to untar module
* Update leftover modules
* Remove last remaining addParams

Co-authored-by: JoseEspinosa
Co-authored-by: Gregor Sturm
Co-authored-by: MaxUlysse
--- .github/workflows/nf-core-linting.yml | 4 - .github/workflows/pytest-workflow.yml | 10 +- README.md | 4 +- modules/abacas/functions.nf | 78 ---------- modules/abacas/main.nf | 26
++-- modules/adapterremoval/functions.nf | 78 ---------- modules/adapterremoval/main.nf | 37 ++--- modules/agrvate/functions.nf | 78 ---------- modules/agrvate/main.nf | 26 ++-- modules/allelecounter/functions.nf | 78 ---------- modules/allelecounter/main.nf | 26 ++-- modules/amps/functions.nf | 78 ---------- modules/amps/main.nf | 24 +--- modules/arriba/functions.nf | 78 ---------- modules/arriba/main.nf | 28 ++-- modules/artic/guppyplex/functions.nf | 78 ---------- modules/artic/guppyplex/main.nf | 26 ++-- modules/artic/minion/functions.nf | 78 ---------- modules/artic/minion/main.nf | 34 ++--- modules/assemblyscan/functions.nf | 78 ---------- modules/assemblyscan/main.nf | 24 +--- modules/ataqv/ataqv/functions.nf | 78 ---------- modules/ataqv/ataqv/main.nf | 25 ++-- modules/bakta/functions.nf | 78 ---------- modules/bakta/main.nf | 34 ++--- modules/bamaligncleaner/functions.nf | 78 ---------- modules/bamaligncleaner/main.nf | 26 ++-- modules/bamtools/split/functions.nf | 78 ---------- modules/bamtools/split/main.nf | 26 ++-- modules/bamutil/trimbam/functions.nf | 78 ---------- modules/bamutil/trimbam/main.nf | 26 ++-- modules/bandage/image/functions.nf | 78 ---------- modules/bandage/image/main.nf | 28 ++-- modules/bbmap/align/functions.nf | 78 ---------- modules/bbmap/align/main.nf | 26 ++-- modules/bbmap/bbduk/functions.nf | 78 ---------- modules/bbmap/bbduk/main.nf | 25 ++-- modules/bbmap/bbsplit/functions.nf | 78 ---------- modules/bbmap/bbsplit/main.nf | 32 ++--- modules/bbmap/index/functions.nf | 78 ---------- modules/bbmap/index/main.nf | 24 +--- modules/bcftools/concat/functions.nf | 78 ---------- modules/bcftools/concat/main.nf | 26 ++-- modules/bcftools/consensus/functions.nf | 78 ---------- modules/bcftools/consensus/main.nf | 26 ++-- modules/bcftools/filter/functions.nf | 78 ---------- modules/bcftools/filter/main.nf | 26 ++-- modules/bcftools/index/functions.nf | 78 ---------- modules/bcftools/index/main.nf | 26 ++-- modules/bcftools/isec/functions.nf | 78 ---------- modules/bcftools/isec/main.nf | 26 ++-- modules/bcftools/merge/functions.nf | 78 ---------- modules/bcftools/merge/main.nf | 26 ++-- modules/bcftools/mpileup/functions.nf | 78 ---------- modules/bcftools/mpileup/main.nf | 36 +++-- modules/bcftools/norm/functions.nf | 78 ---------- modules/bcftools/norm/main.nf | 26 ++-- modules/bcftools/query/functions.nf | 78 ---------- modules/bcftools/query/main.nf | 26 ++-- modules/bcftools/reheader/functions.nf | 78 ---------- modules/bcftools/reheader/main.nf | 26 ++-- modules/bcftools/stats/functions.nf | 78 ---------- modules/bcftools/stats/main.nf | 26 ++-- modules/bcftools/view/functions.nf | 78 ---------- modules/bcftools/view/main.nf | 28 ++-- modules/bedtools/bamtobed/functions.nf | 78 ---------- modules/bedtools/bamtobed/main.nf | 26 ++-- modules/bedtools/complement/functions.nf | 78 ---------- modules/bedtools/complement/main.nf | 26 ++-- modules/bedtools/genomecov/functions.nf | 78 ---------- modules/bedtools/genomecov/main.nf | 34 ++--- modules/bedtools/getfasta/functions.nf | 78 ---------- modules/bedtools/getfasta/main.nf | 26 ++-- modules/bedtools/intersect/functions.nf | 78 ---------- modules/bedtools/intersect/main.nf | 26 ++-- modules/bedtools/makewindows/functions.nf | 78 ---------- modules/bedtools/makewindows/main.nf | 26 ++-- modules/bedtools/maskfasta/functions.nf | 78 ---------- modules/bedtools/maskfasta/main.nf | 26 ++-- modules/bedtools/merge/functions.nf | 78 ---------- modules/bedtools/merge/main.nf | 26 ++-- modules/bedtools/slop/functions.nf 
| 78 ---------- modules/bedtools/slop/main.nf | 26 ++-- modules/bedtools/sort/functions.nf | 78 ---------- modules/bedtools/sort/main.nf | 26 ++-- modules/bedtools/subtract/functions.nf | 78 ---------- modules/bedtools/subtract/main.nf | 26 ++-- modules/bismark/align/functions.nf | 78 ---------- modules/bismark/align/main.nf | 26 ++-- modules/bismark/deduplicate/functions.nf | 78 ---------- modules/bismark/deduplicate/main.nf | 26 ++-- .../bismark/genomepreparation/functions.nf | 78 ---------- modules/bismark/genomepreparation/main.nf | 24 +--- .../bismark/methylationextractor/functions.nf | 78 ---------- modules/bismark/methylationextractor/main.nf | 24 +--- modules/bismark/report/functions.nf | 78 ---------- modules/bismark/report/main.nf | 24 +--- modules/bismark/summary/functions.nf | 78 ---------- modules/bismark/summary/main.nf | 22 +-- modules/blast/blastn/functions.nf | 78 ---------- modules/blast/blastn/main.nf | 26 ++-- modules/blast/makeblastdb/functions.nf | 78 ---------- modules/blast/makeblastdb/main.nf | 24 +--- modules/bowtie/align/functions.nf | 78 ---------- modules/bowtie/align/main.nf | 29 ++-- modules/bowtie/build/functions.nf | 78 ---------- modules/bowtie/build/main.nf | 22 +-- modules/bowtie2/align/functions.nf | 78 ---------- modules/bowtie2/align/main.nf | 37 ++--- modules/bowtie2/build/functions.nf | 78 ---------- modules/bowtie2/build/main.nf | 24 +--- modules/bwa/aln/functions.nf | 78 ---------- modules/bwa/aln/main.nf | 34 ++--- modules/bwa/index/functions.nf | 78 ---------- modules/bwa/index/main.nf | 24 +--- modules/bwa/mem/functions.nf | 78 ---------- modules/bwa/mem/main.nf | 29 ++-- modules/bwa/sampe/functions.nf | 78 ---------- modules/bwa/sampe/main.nf | 26 ++-- modules/bwa/samse/functions.nf | 78 ---------- modules/bwa/samse/main.nf | 26 ++-- modules/bwamem2/index/functions.nf | 78 ---------- modules/bwamem2/index/main.nf | 24 +--- modules/bwamem2/mem/functions.nf | 78 ---------- modules/bwamem2/mem/main.nf | 29 ++-- modules/bwameth/align/functions.nf | 78 ---------- modules/bwameth/align/main.nf | 29 ++-- modules/bwameth/index/functions.nf | 78 ---------- modules/bwameth/index/main.nf | 22 +-- modules/cat/cat/functions.nf | 78 ---------- modules/cat/cat/main.nf | 25 ++-- modules/cat/fastq/functions.nf | 78 ---------- modules/cat/fastq/main.nf | 28 ++-- modules/cellranger/mkref/functions.nf | 78 ---------- modules/cellranger/mkref/main.nf | 27 ++-- modules/checkm/lineagewf/functions.nf | 78 ---------- modules/checkm/lineagewf/main.nf | 26 ++-- modules/chromap/chromap/functions.nf | 78 ---------- modules/chromap/chromap/main.nf | 55 ++++--- modules/chromap/index/functions.nf | 78 ---------- modules/chromap/index/main.nf | 28 ++-- modules/clonalframeml/functions.nf | 78 ---------- modules/clonalframeml/main.nf | 26 ++-- modules/cmseq/polymut/functions.nf | 78 ---------- modules/cmseq/polymut/main.nf | 27 ++-- modules/cnvkit/batch/functions.nf | 78 ---------- modules/cnvkit/batch/main.nf | 35 ++--- modules/cooler/cload/functions.nf | 78 ---------- modules/cooler/cload/main.nf | 28 ++-- modules/cooler/digest/functions.nf | 78 ---------- modules/cooler/digest/main.nf | 24 +--- modules/cooler/dump/functions.nf | 78 ---------- modules/cooler/dump/main.nf | 26 ++-- modules/cooler/dump/meta.yml | 3 + modules/cooler/merge/functions.nf | 78 ---------- modules/cooler/merge/main.nf | 26 ++-- modules/cooler/zoomify/functions.nf | 78 ---------- modules/cooler/zoomify/main.nf | 26 ++-- modules/csvtk/concat/functions.nf | 78 ---------- modules/csvtk/concat/main.nf | 24 
+--- modules/csvtk/split/functions.nf | 78 ---------- modules/csvtk/split/main.nf | 26 ++-- .../custom/dumpsoftwareversions/functions.nf | 78 ---------- modules/custom/dumpsoftwareversions/main.nf | 95 +----------- modules/custom/dumpsoftwareversions/meta.yml | 1 + .../templates/dumpsoftwareversions.py | 89 ++++++++++++ modules/custom/getchromsizes/functions.nf | 78 ---------- modules/custom/getchromsizes/main.nf | 22 +-- modules/cutadapt/functions.nf | 78 ---------- modules/cutadapt/main.nf | 26 ++-- modules/damageprofiler/functions.nf | 78 ---------- modules/damageprofiler/main.nf | 37 ++--- modules/dastool/dastool/functions.nf | 78 ---------- modules/dastool/dastool/main.nf | 25 ++-- modules/dastool/scaffolds2bin/functions.nf | 78 ---------- modules/dastool/scaffolds2bin/main.nf | 25 ++-- modules/dedup/functions.nf | 78 ---------- modules/dedup/main.nf | 26 ++-- modules/deeptools/computematrix/functions.nf | 78 ---------- modules/deeptools/computematrix/main.nf | 26 ++-- .../deeptools/plotfingerprint/functions.nf | 78 ---------- modules/deeptools/plotfingerprint/main.nf | 26 ++-- modules/deeptools/plotheatmap/functions.nf | 78 ---------- modules/deeptools/plotheatmap/main.nf | 26 ++-- modules/deeptools/plotprofile/functions.nf | 78 ---------- modules/deeptools/plotprofile/main.nf | 26 ++-- modules/delly/call/functions.nf | 78 ---------- modules/delly/call/main.nf | 26 ++-- modules/diamond/blastp/functions.nf | 78 ---------- modules/diamond/blastp/main.nf | 26 ++-- modules/diamond/blastx/functions.nf | 78 ---------- modules/diamond/blastx/main.nf | 26 ++-- modules/diamond/makedb/functions.nf | 78 ---------- modules/diamond/makedb/main.nf | 24 +--- modules/dragonflye/functions.nf | 78 ---------- modules/dragonflye/main.nf | 24 +--- modules/dshbio/exportsegments/functions.nf | 78 ---------- modules/dshbio/exportsegments/main.nf | 26 ++-- modules/dshbio/filterbed/functions.nf | 78 ---------- modules/dshbio/filterbed/main.nf | 26 ++-- modules/dshbio/filtergff3/functions.nf | 78 ---------- modules/dshbio/filtergff3/main.nf | 26 ++-- modules/dshbio/splitbed/functions.nf | 78 ---------- modules/dshbio/splitbed/main.nf | 26 ++-- modules/dshbio/splitgff3/functions.nf | 78 ---------- modules/dshbio/splitgff3/main.nf | 26 ++-- modules/ectyper/functions.nf | 78 ---------- modules/ectyper/main.nf | 27 ++-- modules/emmtyper/functions.nf | 78 ---------- modules/emmtyper/main.nf | 26 ++-- modules/ensemblvep/functions.nf | 78 ---------- modules/ensemblvep/main.nf | 34 ++--- modules/expansionhunter/functions.nf | 78 ---------- modules/expansionhunter/main.nf | 26 ++-- modules/fargene/functions.nf | 78 ---------- modules/fargene/main.nf | 28 ++-- modules/fastani/functions.nf | 78 ---------- modules/fastani/main.nf | 28 ++-- modules/fastp/functions.nf | 78 ---------- modules/fastp/main.nf | 32 ++--- modules/fastqc/functions.nf | 78 ---------- modules/fastqc/main.nf | 32 ++--- modules/fastqscan/functions.nf | 78 ---------- modules/fastqscan/main.nf | 26 ++-- modules/fasttree/functions.nf | 78 ---------- modules/fasttree/main.nf | 24 +--- .../callmolecularconsensusreads/functions.nf | 78 ---------- .../fgbio/callmolecularconsensusreads/main.nf | 25 ++-- modules/fgbio/fastqtobam/functions.nf | 78 ---------- modules/fgbio/fastqtobam/main.nf | 34 ++--- modules/fgbio/groupreadsbyumi/functions.nf | 78 ---------- modules/fgbio/groupreadsbyumi/main.nf | 26 ++-- modules/fgbio/sortbam/functions.nf | 78 ---------- modules/fgbio/sortbam/main.nf | 25 ++-- modules/filtlong/functions.nf | 78 ---------- 
modules/filtlong/main.nf | 26 ++-- modules/flash/functions.nf | 78 ---------- modules/flash/main.nf | 26 ++-- modules/freebayes/functions.nf | 78 ---------- modules/freebayes/main.nf | 40 ++---- modules/freebayes/meta.yml | 7 +- modules/gatk4/applybqsr/functions.nf | 78 ---------- modules/gatk4/applybqsr/main.nf | 26 ++-- modules/gatk4/baserecalibrator/functions.nf | 78 ---------- modules/gatk4/baserecalibrator/main.nf | 26 ++-- modules/gatk4/bedtointervallist/functions.nf | 78 ---------- modules/gatk4/bedtointervallist/main.nf | 26 ++-- .../gatk4/calculatecontamination/functions.nf | 78 ---------- modules/gatk4/calculatecontamination/main.nf | 26 ++-- .../createsequencedictionary/functions.nf | 78 ---------- .../gatk4/createsequencedictionary/main.nf | 24 +--- .../createsomaticpanelofnormals/functions.nf | 78 ---------- .../gatk4/createsomaticpanelofnormals/main.nf | 26 ++-- .../estimatelibrarycomplexity/functions.nf | 78 ---------- .../gatk4/estimatelibrarycomplexity/main.nf | 26 ++-- modules/gatk4/fastqtosam/functions.nf | 78 ---------- modules/gatk4/fastqtosam/main.nf | 26 ++-- modules/gatk4/filtermutectcalls/functions.nf | 78 ---------- modules/gatk4/filtermutectcalls/main.nf | 26 ++-- modules/gatk4/genomicsdbimport/functions.nf | 78 ---------- modules/gatk4/genomicsdbimport/main.nf | 26 ++-- modules/gatk4/genotypegvcfs/functions.nf | 78 ---------- modules/gatk4/genotypegvcfs/main.nf | 26 ++-- modules/gatk4/getpileupsummaries/functions.nf | 78 ---------- modules/gatk4/getpileupsummaries/main.nf | 26 ++-- modules/gatk4/haplotypecaller/functions.nf | 78 ---------- modules/gatk4/haplotypecaller/main.nf | 26 ++-- modules/gatk4/indexfeaturefile/functions.nf | 78 ---------- modules/gatk4/indexfeaturefile/main.nf | 24 +--- modules/gatk4/intervallisttools/functions.nf | 78 ---------- modules/gatk4/intervallisttools/main.nf | 26 ++-- .../learnreadorientationmodel/functions.nf | 78 ---------- .../gatk4/learnreadorientationmodel/main.nf | 26 ++-- modules/gatk4/markduplicates/functions.nf | 78 ---------- modules/gatk4/markduplicates/main.nf | 26 ++-- modules/gatk4/mergebamalignment/functions.nf | 78 ---------- modules/gatk4/mergebamalignment/main.nf | 26 ++-- modules/gatk4/mergevcfs/functions.nf | 78 ---------- modules/gatk4/mergevcfs/main.nf | 26 ++-- modules/gatk4/mutect2/functions.nf | 78 ---------- modules/gatk4/mutect2/main.nf | 26 ++-- modules/gatk4/revertsam/functions.nf | 78 ---------- modules/gatk4/revertsam/main.nf | 26 ++-- modules/gatk4/samtofastq/functions.nf | 78 ---------- modules/gatk4/samtofastq/main.nf | 26 ++-- modules/gatk4/splitncigarreads/functions.nf | 78 ---------- modules/gatk4/splitncigarreads/main.nf | 26 ++-- modules/gatk4/variantfiltration/functions.nf | 78 ---------- modules/gatk4/variantfiltration/main.nf | 26 ++-- modules/genmap/index/functions.nf | 78 ---------- modules/genmap/index/main.nf | 22 +-- modules/genmap/mappability/functions.nf | 78 ---------- modules/genmap/mappability/main.nf | 24 +--- modules/genrich/functions.nf | 78 ---------- modules/genrich/main.nf | 28 ++-- modules/gffread/functions.nf | 78 ---------- modules/gffread/main.nf | 26 ++-- modules/glnexus/functions.nf | 78 ---------- modules/glnexus/main.nf | 26 ++-- modules/graphmap2/align/functions.nf | 78 ---------- modules/graphmap2/align/main.nf | 26 ++-- modules/graphmap2/index/functions.nf | 78 ---------- modules/graphmap2/index/main.nf | 24 +--- modules/gstama/collapse/functions.nf | 78 ---------- modules/gstama/collapse/main.nf | 27 ++-- modules/gstama/merge/functions.nf | 78 ---------- 
modules/gstama/merge/main.nf | 26 ++-- modules/gtdbtk/classifywf/functions.nf | 78 ---------- modules/gtdbtk/classifywf/main.nf | 29 ++-- modules/gubbins/functions.nf | 78 ---------- modules/gubbins/main.nf | 24 +--- modules/gunc/downloaddb/functions.nf | 78 ---------- modules/gunc/downloaddb/main.nf | 24 +--- modules/gunc/run/functions.nf | 78 ---------- modules/gunc/run/main.nf | 26 ++-- modules/gunzip/functions.nf | 78 ---------- modules/gunzip/main.nf | 24 +--- modules/gunzip/test.txt.gz | Bin 47 -> 0 bytes modules/hicap/functions.nf | 78 ---------- modules/hicap/main.nf | 26 ++-- modules/hifiasm/functions.nf | 78 ---------- modules/hifiasm/main.nf | 32 ++--- modules/hisat2/align/functions.nf | 78 ---------- modules/hisat2/align/main.nf | 41 ++---- modules/hisat2/build/functions.nf | 78 ---------- modules/hisat2/build/main.nf | 27 ++-- .../hisat2/extractsplicesites/functions.nf | 78 ---------- modules/hisat2/extractsplicesites/main.nf | 24 +--- modules/hmmcopy/gccounter/functions.nf | 78 ---------- modules/hmmcopy/gccounter/main.nf | 26 ++-- modules/hmmcopy/readcounter/functions.nf | 78 ---------- modules/hmmcopy/readcounter/main.nf | 34 ++--- modules/hmmer/hmmalign/functions.nf | 78 ---------- modules/hmmer/hmmalign/main.nf | 26 ++-- modules/homer/annotatepeaks/functions.nf | 78 ---------- modules/homer/annotatepeaks/main.nf | 28 ++-- modules/homer/findpeaks/functions.nf | 78 ---------- modules/homer/findpeaks/main.nf | 28 ++-- modules/homer/maketagdirectory/functions.nf | 78 ---------- modules/homer/maketagdirectory/main.nf | 30 ++-- modules/homer/makeucscfile/functions.nf | 78 ---------- modules/homer/makeucscfile/main.nf | 30 ++-- modules/idr/functions.nf | 78 ---------- modules/idr/main.nf | 24 +--- modules/imputeme/vcftoprs/functions.nf | 78 ---------- modules/imputeme/vcftoprs/main.nf | 27 ++-- modules/iqtree/functions.nf | 78 ---------- modules/iqtree/main.nf | 24 +--- modules/ismapper/functions.nf | 78 ---------- modules/ismapper/main.nf | 26 ++-- modules/isoseq3/cluster/functions.nf | 78 ---------- modules/isoseq3/cluster/main.nf | 30 ++-- modules/isoseq3/refine/functions.nf | 78 ---------- modules/isoseq3/refine/main.nf | 26 ++-- modules/ivar/consensus/functions.nf | 78 ---------- modules/ivar/consensus/main.nf | 29 ++-- modules/ivar/trim/functions.nf | 78 ---------- modules/ivar/trim/main.nf | 26 ++-- modules/ivar/variants/functions.nf | 78 ---------- modules/ivar/variants/main.nf | 29 ++-- modules/jupyternotebook/functions.nf | 78 ---------- modules/jupyternotebook/main.nf | 41 +++--- modules/kallisto/index/functions.nf | 78 ---------- modules/kallisto/index/main.nf | 24 +--- modules/kallistobustools/count/functions.nf | 78 ---------- modules/kallistobustools/count/main.nf | 30 ++-- modules/kallistobustools/count/meta.yml | 4 +- modules/kallistobustools/ref/functions.nf | 78 ---------- modules/kallistobustools/ref/main.nf | 34 ++--- modules/kallistobustools/ref/meta.yml | 4 +- modules/khmer/normalizebymedian/functions.nf | 78 ---------- modules/khmer/normalizebymedian/main.nf | 31 ++-- modules/kleborate/functions.nf | 78 ---------- modules/kleborate/main.nf | 26 ++-- modules/kraken2/kraken2/functions.nf | 78 ---------- modules/kraken2/kraken2/main.nf | 26 ++-- modules/krona/kronadb/main.nf | 27 ++++ .../{kronatools => krona}/kronadb/meta.yml | 4 +- modules/krona/ktimporttaxonomy/main.nf | 30 ++++ .../ktimporttaxonomy/meta.yml | 6 +- modules/kronatools/kronadb/functions.nf | 78 ---------- modules/kronatools/kronadb/main.nf | 35 ----- 
.../kronatools/ktimporttaxonomy/functions.nf | 78 ---------- modules/kronatools/ktimporttaxonomy/main.nf | 39 ----- modules/last/dotplot/functions.nf | 78 ---------- modules/last/dotplot/main.nf | 26 ++-- modules/last/lastal/functions.nf | 78 ---------- modules/last/lastal/main.nf | 26 ++-- modules/last/lastdb/functions.nf | 78 ---------- modules/last/lastdb/main.nf | 26 ++-- modules/last/mafconvert/functions.nf | 78 ---------- modules/last/mafconvert/main.nf | 26 ++-- modules/last/mafswap/functions.nf | 78 ---------- modules/last/mafswap/main.nf | 26 ++-- modules/last/postmask/functions.nf | 78 ---------- modules/last/postmask/main.nf | 26 ++-- modules/last/split/functions.nf | 78 ---------- modules/last/split/main.nf | 26 ++-- modules/last/train/functions.nf | 78 ---------- modules/last/train/main.nf | 26 ++-- modules/leehom/functions.nf | 78 ---------- modules/leehom/main.nf | 101 ++++++------- modules/lib/functions.nf | 78 ---------- modules/lima/functions.nf | 78 ---------- modules/lima/main.nf | 26 ++-- modules/lissero/functions.nf | 78 ---------- modules/lissero/main.nf | 26 ++-- modules/lofreq/call/functions.nf | 78 ---------- modules/lofreq/call/main.nf | 26 ++-- modules/lofreq/callparallel/functions.nf | 78 ---------- modules/lofreq/callparallel/main.nf | 26 ++-- modules/lofreq/filter/functions.nf | 78 ---------- modules/lofreq/filter/main.nf | 26 ++-- modules/lofreq/indelqual/functions.nf | 78 ---------- modules/lofreq/indelqual/main.nf | 25 ++-- modules/macs2/callpeak/functions.nf | 78 ---------- modules/macs2/callpeak/main.nf | 42 +++--- modules/malt/build/functions.nf | 78 ---------- modules/malt/build/main.nf | 26 ++-- modules/malt/run/functions.nf | 78 ---------- modules/malt/run/main.nf | 26 ++-- modules/maltextract/functions.nf | 78 ---------- modules/maltextract/main.nf | 24 +--- modules/manta/germline/functions.nf | 78 ---------- modules/manta/germline/main.nf | 25 +--- modules/manta/somatic/functions.nf | 78 ---------- modules/manta/somatic/main.nf | 24 +--- modules/manta/tumoronly/functions.nf | 78 ---------- modules/manta/tumoronly/main.nf | 25 +--- modules/mapdamage2/functions.nf | 78 ---------- modules/mapdamage2/main.nf | 25 ++-- modules/mash/sketch/functions.nf | 78 ---------- modules/mash/sketch/main.nf | 25 ++-- modules/mashtree/functions.nf | 78 ---------- modules/mashtree/main.nf | 26 ++-- modules/maxbin2/functions.nf | 78 ---------- modules/maxbin2/main.nf | 24 +--- modules/medaka/functions.nf | 78 ---------- modules/medaka/main.nf | 26 ++-- modules/megahit/functions.nf | 78 ---------- modules/megahit/main.nf | 37 ++--- modules/meningotype/functions.nf | 78 ---------- modules/meningotype/main.nf | 26 ++-- .../jgisummarizebamcontigdepths/functions.nf | 78 ---------- .../jgisummarizebamcontigdepths/main.nf | 25 ++-- modules/metabat2/metabat2/functions.nf | 78 ---------- modules/metabat2/metabat2/main.nf | 25 ++-- modules/metaphlan3/functions.nf | 78 ---------- modules/metaphlan3/main.nf | 26 ++-- modules/methyldackel/extract/functions.nf | 78 ---------- modules/methyldackel/extract/main.nf | 24 +--- modules/methyldackel/mbias/functions.nf | 78 ---------- modules/methyldackel/mbias/main.nf | 26 ++-- modules/minia/functions.nf | 78 ---------- modules/minia/main.nf | 26 ++-- modules/miniasm/functions.nf | 78 ---------- modules/miniasm/main.nf | 26 ++-- modules/minimap2/align/functions.nf | 78 ---------- modules/minimap2/align/main.nf | 26 ++-- modules/minimap2/index/functions.nf | 78 ---------- modules/minimap2/index/main.nf | 24 +--- modules/mlst/functions.nf | 
78 ---------- modules/mlst/main.nf | 24 +--- modules/mosdepth/functions.nf | 78 ---------- modules/mosdepth/main.nf | 26 ++-- modules/msisensor/msi/functions.nf | 78 ---------- modules/msisensor/msi/main.nf | 26 ++-- modules/msisensor/scan/functions.nf | 78 ---------- modules/msisensor/scan/main.nf | 26 ++-- modules/mtnucratio/functions.nf | 78 ---------- modules/mtnucratio/main.nf | 25 ++-- modules/multiqc/functions.nf | 78 ---------- modules/multiqc/main.nf | 24 +--- modules/mummer/functions.nf | 78 ---------- modules/mummer/main.nf | 28 ++-- modules/muscle/functions.nf | 78 ---------- modules/muscle/main.nf | 41 ++---- modules/nanolyse/functions.nf | 78 ---------- modules/nanolyse/main.nf | 24 +--- modules/nanoplot/functions.nf | 78 ---------- modules/nanoplot/main.nf | 24 +--- modules/ncbigenomedownload/functions.nf | 78 ---------- modules/ncbigenomedownload/main.nf | 26 ++-- modules/nextclade/functions.nf | 78 ---------- modules/nextclade/main.nf | 26 ++-- modules/ngmaster/functions.nf | 78 ---------- modules/ngmaster/main.nf | 26 ++-- modules/nucmer/functions.nf | 78 ---------- modules/nucmer/main.nf | 26 ++-- modules/optitype/functions.nf | 78 ---------- modules/optitype/main.nf | 33 ++--- modules/pairix/functions.nf | 78 ---------- modules/pairix/main.nf | 24 +--- modules/pairtools/dedup/functions.nf | 78 ---------- modules/pairtools/dedup/main.nf | 26 ++-- modules/pairtools/flip/functions.nf | 78 ---------- modules/pairtools/flip/main.nf | 26 ++-- modules/pairtools/parse/functions.nf | 78 ---------- modules/pairtools/parse/main.nf | 26 ++-- modules/pairtools/restrict/functions.nf | 78 ---------- modules/pairtools/restrict/main.nf | 26 ++-- modules/pairtools/select/functions.nf | 78 ---------- modules/pairtools/select/main.nf | 26 ++-- modules/pairtools/sort/functions.nf | 78 ---------- modules/pairtools/sort/main.nf | 26 ++-- modules/pangolin/functions.nf | 78 ---------- modules/pangolin/main.nf | 26 ++-- modules/paraclu/functions.nf | 78 ---------- modules/paraclu/main.nf | 25 ++-- modules/pbbam/pbmerge/functions.nf | 78 ---------- modules/pbbam/pbmerge/main.nf | 26 ++-- modules/pbccs/functions.nf | 78 ---------- modules/pbccs/main.nf | 26 ++-- modules/peddy/functions.nf | 78 ---------- modules/peddy/main.nf | 26 ++-- modules/phantompeakqualtools/functions.nf | 78 ---------- modules/phantompeakqualtools/main.nf | 27 ++-- modules/phyloflash/functions.nf | 78 ---------- modules/phyloflash/main.nf | 41 ++---- modules/picard/collecthsmetrics/functions.nf | 78 ---------- modules/picard/collecthsmetrics/main.nf | 26 ++-- .../collectmultiplemetrics/functions.nf | 78 ---------- modules/picard/collectmultiplemetrics/main.nf | 26 ++-- modules/picard/collectwgsmetrics/functions.nf | 78 ---------- modules/picard/collectwgsmetrics/main.nf | 26 ++-- modules/picard/filtersamreads/functions.nf | 78 ---------- modules/picard/filtersamreads/main.nf | 32 ++--- modules/picard/markduplicates/functions.nf | 78 ---------- modules/picard/markduplicates/main.nf | 26 ++-- modules/picard/mergesamfiles/functions.nf | 78 ---------- modules/picard/mergesamfiles/main.nf | 30 ++-- modules/picard/sortsam/functions.nf | 78 ---------- modules/picard/sortsam/main.nf | 25 +--- modules/pirate/functions.nf | 78 ---------- modules/pirate/main.nf | 26 ++-- modules/plasmidid/functions.nf | 78 ---------- modules/plasmidid/main.nf | 26 ++-- modules/plink/extract/functions.nf | 78 ---------- modules/plink/extract/main.nf | 26 ++-- modules/plink/vcf/functions.nf | 78 ---------- modules/plink/vcf/main.nf | 26 ++-- 
modules/plink2/vcf/functions.nf | 78 ---------- modules/plink2/vcf/main.nf | 26 ++-- modules/pmdtools/filter/functions.nf | 78 ---------- modules/pmdtools/filter/main.nf | 30 ++-- modules/porechop/functions.nf | 78 ---------- modules/porechop/main.nf | 34 ++--- modules/preseq/lcextrap/functions.nf | 78 ---------- modules/preseq/lcextrap/main.nf | 26 ++-- modules/prodigal/functions.nf | 78 ---------- modules/prodigal/main.nf | 26 ++-- modules/prokka/functions.nf | 78 ---------- modules/prokka/main.nf | 25 ++-- modules/pycoqc/functions.nf | 78 ---------- modules/pycoqc/main.nf | 24 +--- modules/pydamage/analyze/functions.nf | 78 ---------- modules/pydamage/analyze/main.nf | 26 ++-- modules/pydamage/filter/functions.nf | 78 ---------- modules/pydamage/filter/main.nf | 26 ++-- modules/qcat/functions.nf | 78 ---------- modules/qcat/main.nf | 24 +--- modules/qualimap/bamqc/functions.nf | 78 ---------- modules/qualimap/bamqc/main.nf | 26 ++-- modules/qualimap/rnaseq/functions.nf | 78 ---------- modules/qualimap/rnaseq/main.nf | 26 ++-- modules/quast/functions.nf | 78 ---------- modules/quast/main.nf | 28 ++-- modules/racon/functions.nf | 78 ---------- modules/racon/main.nf | 28 ++-- modules/rapidnj/functions.nf | 78 ---------- modules/rapidnj/main.nf | 26 ++-- modules/rasusa/functions.nf | 78 ---------- modules/rasusa/main.nf | 26 ++-- modules/raxmlng/functions.nf | 78 ---------- modules/raxmlng/main.nf | 24 +--- modules/rmarkdownnotebook/functions.nf | 78 ---------- modules/rmarkdownnotebook/main.nf | 41 +++--- modules/roary/functions.nf | 78 ---------- modules/roary/main.nf | 26 ++-- modules/rsem/calculateexpression/functions.nf | 78 ---------- modules/rsem/calculateexpression/main.nf | 26 ++-- modules/rsem/preparereference/functions.nf | 78 ---------- modules/rsem/preparereference/main.nf | 39 ++--- modules/rseqc/bamstat/functions.nf | 78 ---------- modules/rseqc/bamstat/main.nf | 26 ++-- modules/rseqc/inferexperiment/functions.nf | 78 ---------- modules/rseqc/inferexperiment/main.nf | 26 ++-- modules/rseqc/innerdistance/functions.nf | 78 ---------- modules/rseqc/innerdistance/main.nf | 30 ++-- modules/rseqc/junctionannotation/functions.nf | 78 ---------- modules/rseqc/junctionannotation/main.nf | 26 ++-- modules/rseqc/junctionsaturation/functions.nf | 78 ---------- modules/rseqc/junctionsaturation/main.nf | 26 ++-- modules/rseqc/readdistribution/functions.nf | 78 ---------- modules/rseqc/readdistribution/main.nf | 24 +--- modules/rseqc/readduplication/functions.nf | 78 ---------- modules/rseqc/readduplication/main.nf | 26 ++-- modules/salmon/index/functions.nf | 78 ---------- modules/salmon/index/main.nf | 24 +--- modules/salmon/quant/functions.nf | 78 ---------- modules/salmon/quant/main.nf | 26 ++-- modules/samblaster/functions.nf | 78 ---------- modules/samblaster/main.nf | 32 ++--- modules/samtools/ampliconclip/functions.nf | 78 ---------- modules/samtools/ampliconclip/main.nf | 26 ++-- modules/samtools/bam2fq/functions.nf | 78 ---------- modules/samtools/bam2fq/main.nf | 32 ++--- modules/samtools/depth/functions.nf | 78 ---------- modules/samtools/depth/main.nf | 26 ++-- modules/samtools/faidx/functions.nf | 78 ---------- modules/samtools/faidx/main.nf | 22 +-- modules/samtools/fastq/functions.nf | 78 ---------- modules/samtools/fastq/main.nf | 26 ++-- modules/samtools/fixmate/functions.nf | 78 ---------- modules/samtools/fixmate/main.nf | 26 ++-- modules/samtools/flagstat/functions.nf | 78 ---------- modules/samtools/flagstat/main.nf | 22 +-- modules/samtools/idxstats/functions.nf | 78 
---------- modules/samtools/idxstats/main.nf | 22 +-- modules/samtools/index/functions.nf | 78 ---------- modules/samtools/index/main.nf | 24 +--- modules/samtools/merge/functions.nf | 78 ---------- modules/samtools/merge/main.nf | 26 ++-- modules/samtools/mpileup/functions.nf | 78 ---------- modules/samtools/mpileup/main.nf | 26 ++-- modules/samtools/sort/functions.nf | 78 ---------- modules/samtools/sort/main.nf | 26 ++-- modules/samtools/stats/functions.nf | 78 ---------- modules/samtools/stats/main.nf | 22 +-- modules/samtools/view/functions.nf | 78 ---------- modules/samtools/view/main.nf | 26 ++-- modules/scoary/functions.nf | 78 ---------- modules/scoary/main.nf | 26 ++-- modules/seacr/callpeak/functions.nf | 78 ---------- modules/seacr/callpeak/main.nf | 29 ++-- modules/seqkit/split2/functions.nf | 78 ---------- modules/seqkit/split2/main.nf | 32 ++--- modules/seqsero2/functions.nf | 78 ---------- modules/seqsero2/main.nf | 26 ++-- modules/seqtk/mergepe/functions.nf | 78 ---------- modules/seqtk/mergepe/main.nf | 30 ++-- modules/seqtk/sample/functions.nf | 78 ---------- modules/seqtk/sample/main.nf | 38 ++--- modules/seqtk/subseq/functions.nf | 78 ---------- modules/seqtk/subseq/main.nf | 26 ++-- modules/sequenzautils/bam2seqz/functions.nf | 78 ---------- modules/sequenzautils/bam2seqz/main.nf | 26 ++-- modules/sequenzautils/gcwiggle/functions.nf | 78 ---------- modules/sequenzautils/gcwiggle/main.nf | 26 ++-- modules/seqwish/induce/functions.nf | 78 ---------- modules/seqwish/induce/main.nf | 28 ++-- modules/shovill/functions.nf | 78 ---------- modules/shovill/main.nf | 24 +--- modules/snpdists/functions.nf | 78 ---------- modules/snpdists/main.nf | 26 ++-- modules/snpeff/functions.nf | 78 ---------- modules/snpeff/main.nf | 39 ++--- modules/snpsites/functions.nf | 78 ---------- modules/snpsites/main.nf | 24 +--- modules/sortmerna/functions.nf | 78 ---------- modules/sortmerna/main.nf | 32 ++--- modules/spades/functions.nf | 78 ---------- modules/spades/main.nf | 26 ++-- modules/spatyper/functions.nf | 78 ---------- modules/spatyper/main.nf | 26 ++-- modules/sratools/fasterqdump/functions.nf | 78 ---------- modules/sratools/fasterqdump/main.nf | 27 ++-- modules/sratools/prefetch/functions.nf | 78 ---------- modules/sratools/prefetch/main.nf | 24 +--- modules/staphopiasccmec/functions.nf | 78 ---------- modules/staphopiasccmec/main.nf | 26 ++-- modules/star/align/functions.nf | 78 ---------- modules/star/align/main.nf | 39 +++-- modules/star/genomegenerate/functions.nf | 78 ---------- modules/star/genomegenerate/main.nf | 34 ++--- modules/strelka/germline/functions.nf | 78 ---------- modules/strelka/germline/main.nf | 27 ++-- modules/strelka/somatic/functions.nf | 78 ---------- modules/strelka/somatic/main.nf | 26 ++-- modules/stringtie/merge/functions.nf | 78 ---------- modules/stringtie/merge/main.nf | 22 +-- modules/stringtie/stringtie/functions.nf | 78 ---------- modules/stringtie/stringtie/main.nf | 26 ++-- modules/subread/featurecounts/functions.nf | 78 ---------- modules/subread/featurecounts/main.nf | 26 ++-- modules/tabix/bgzip/functions.nf | 78 ---------- modules/tabix/bgzip/main.nf | 26 ++-- modules/tabix/bgziptabix/functions.nf | 78 ---------- modules/tabix/bgziptabix/main.nf | 29 ++-- modules/tabix/tabix/functions.nf | 78 ---------- modules/tabix/tabix/main.nf | 24 +--- modules/tbprofiler/profile/functions.nf | 78 ---------- modules/tbprofiler/profile/main.nf | 26 ++-- modules/tiddit/cov/functions.nf | 78 ---------- modules/tiddit/cov/main.nf | 29 ++-- 
modules/tiddit/sv/functions.nf | 78 ---------- modules/tiddit/sv/main.nf | 26 ++-- modules/trimgalore/functions.nf | 78 ---------- modules/trimgalore/main.nf | 32 ++--- modules/ucsc/bed12tobigbed/functions.nf | 78 ---------- modules/ucsc/bed12tobigbed/main.nf | 26 ++-- modules/ucsc/bedclip/functions.nf | 78 ---------- modules/ucsc/bedclip/main.nf | 26 ++-- modules/ucsc/bedgraphtobigwig/functions.nf | 78 ---------- modules/ucsc/bedgraphtobigwig/main.nf | 26 ++-- .../ucsc/bigwigaverageoverbed/functions.nf | 78 ---------- modules/ucsc/bigwigaverageoverbed/main.nf | 30 ++-- modules/ucsc/liftover/functions.nf | 78 ---------- modules/ucsc/liftover/main.nf | 28 ++-- modules/ucsc/wigtobigwig/functions.nf | 78 ---------- modules/ucsc/wigtobigwig/main.nf | 35 ++--- modules/ultra/pipeline/functions.nf | 78 ---------- modules/ultra/pipeline/main.nf | 26 ++-- modules/umitools/dedup/functions.nf | 78 ---------- modules/umitools/dedup/main.nf | 26 ++-- modules/umitools/extract/functions.nf | 78 ---------- modules/umitools/extract/main.nf | 32 ++--- modules/unicycler/functions.nf | 78 ---------- modules/unicycler/main.nf | 26 ++-- modules/untar/functions.nf | 78 ---------- modules/untar/main.nf | 28 ++-- modules/unzip/functions.nf | 78 ---------- modules/unzip/main.nf | 24 +--- modules/variantbam/functions.nf | 78 ---------- modules/variantbam/main.nf | 32 ++--- modules/vcftools/functions.nf | 78 ---------- modules/vcftools/main.nf | 52 +++---- modules/yara/index/functions.nf | 78 ---------- modules/yara/index/main.nf | 22 +-- modules/yara/mapper/functions.nf | 78 ---------- modules/yara/mapper/main.nf | 34 ++--- tests/config/nextflow.config | 8 +- tests/config/pytest_modules.yml | 122 ++++++++-------- tests/config/test_data.config | 77 ++++++---- tests/modules/abacas/main.nf | 2 +- tests/modules/abacas/nextflow.config | 9 ++ tests/modules/abacas/test.yml | 2 +- tests/modules/adapterremoval/main.nf | 2 +- tests/modules/adapterremoval/nextflow.config | 5 + tests/modules/adapterremoval/test.yml | 6 +- tests/modules/agrvate/main.nf | 2 +- tests/modules/agrvate/nextflow.config | 9 ++ tests/modules/agrvate/test.yml | 2 +- tests/modules/allelecounter/main.nf | 2 +- tests/modules/allelecounter/nextflow.config | 5 + tests/modules/allelecounter/test.yml | 4 +- tests/modules/amps/main.nf | 12 +- tests/modules/amps/nextflow.config | 9 ++ tests/modules/amps/test.yml | 2 +- tests/modules/arriba/main.nf | 16 ++- tests/modules/arriba/nextflow.config | 13 ++ tests/modules/arriba/test.yml | 70 +++++---- tests/modules/artic/guppyplex/main.nf | 2 +- tests/modules/artic/guppyplex/nextflow.config | 5 + tests/modules/artic/guppyplex/test.yml | 2 +- tests/modules/artic/minion/main.nf | 18 +-- tests/modules/artic/minion/nextflow.config | 5 + tests/modules/artic/minion/test.yml | 2 +- tests/modules/assemblyscan/main.nf | 2 +- tests/modules/assemblyscan/nextflow.config | 5 + tests/modules/assemblyscan/test.yml | 2 +- tests/modules/ataqv/ataqv/main.nf | 4 +- tests/modules/ataqv/ataqv/nextflow.config | 9 ++ tests/modules/ataqv/ataqv/test.yml | 10 +- tests/modules/bakta/main.nf | 10 +- tests/modules/bakta/nextflow.config | 5 + tests/modules/bamaligncleaner/main.nf | 2 +- tests/modules/bamaligncleaner/nextflow.config | 5 + tests/modules/bamaligncleaner/test.yml | 2 +- tests/modules/bamtools/split/main.nf | 2 +- tests/modules/bamtools/split/nextflow.config | 9 ++ tests/modules/bamtools/split/test.yml | 2 +- tests/modules/bamutil/trimbam/main.nf | 2 +- tests/modules/bamutil/trimbam/nextflow.config | 5 + 
tests/modules/bamutil/trimbam/test.yml | 2 +- tests/modules/bandage/image/main.nf | 2 +- tests/modules/bandage/image/nextflow.config | 5 + tests/modules/bandage/image/test.yml | 2 +- tests/modules/bbmap/align/main.nf | 6 +- tests/modules/bbmap/align/nextflow.config | 9 ++ tests/modules/bbmap/align/test.yml | 8 +- tests/modules/bbmap/bbduk/main.nf | 2 +- tests/modules/bbmap/bbduk/nextflow.config | 10 ++ tests/modules/bbmap/bbduk/test.yml | 8 +- tests/modules/bbmap/bbsplit/main.nf | 4 +- tests/modules/bbmap/bbsplit/nextflow.config | 5 + tests/modules/bbmap/bbsplit/test.yml | 2 +- tests/modules/bbmap/index/main.nf | 2 +- tests/modules/bbmap/index/nextflow.config | 5 + tests/modules/bbmap/index/test.yml | 4 +- tests/modules/bcftools/concat/main.nf | 2 +- tests/modules/bcftools/concat/nextflow.config | 9 ++ tests/modules/bcftools/concat/test.yml | 2 +- tests/modules/bcftools/consensus/main.nf | 2 +- .../bcftools/consensus/nextflow.config | 5 + tests/modules/bcftools/consensus/test.yml | 2 +- tests/modules/bcftools/filter/main.nf | 2 +- tests/modules/bcftools/filter/nextflow.config | 9 ++ tests/modules/bcftools/filter/test.yml | 2 +- tests/modules/bcftools/index/main.nf | 4 +- tests/modules/bcftools/index/nextflow.config | 9 ++ tests/modules/bcftools/index/test.yml | 4 +- tests/modules/bcftools/isec/main.nf | 2 +- tests/modules/bcftools/isec/nextflow.config | 9 ++ tests/modules/bcftools/isec/test.yml | 2 +- tests/modules/bcftools/merge/main.nf | 2 +- tests/modules/bcftools/merge/nextflow.config | 9 ++ tests/modules/bcftools/merge/test.yml | 2 +- tests/modules/bcftools/mpileup/main.nf | 3 +- .../modules/bcftools/mpileup/nextflow.config | 10 ++ tests/modules/bcftools/mpileup/test.yml | 2 +- tests/modules/bcftools/norm/main.nf | 2 +- tests/modules/bcftools/norm/nextflow.config | 9 ++ tests/modules/bcftools/norm/test.yml | 2 +- tests/modules/bcftools/query/main.nf | 2 +- tests/modules/bcftools/query/nextflow.config | 9 ++ tests/modules/bcftools/query/test.yml | 4 +- tests/modules/bcftools/reheader/main.nf | 2 +- .../modules/bcftools/reheader/nextflow.config | 9 ++ tests/modules/bcftools/reheader/test.yml | 6 +- tests/modules/bcftools/stats/main.nf | 2 +- tests/modules/bcftools/stats/nextflow.config | 5 + tests/modules/bcftools/stats/test.yml | 2 +- tests/modules/bcftools/view/main.nf | 2 +- tests/modules/bcftools/view/nextflow.config | 9 ++ tests/modules/bcftools/view/test.yml | 4 +- tests/modules/bedtools/bamtobed/main.nf | 2 +- .../modules/bedtools/bamtobed/nextflow.config | 5 + tests/modules/bedtools/bamtobed/test.yml | 2 +- tests/modules/bedtools/complement/main.nf | 2 +- .../bedtools/complement/nextflow.config | 9 ++ tests/modules/bedtools/complement/test.yml | 2 +- tests/modules/bedtools/genomecov/main.nf | 2 +- .../bedtools/genomecov/nextflow.config | 9 ++ tests/modules/bedtools/genomecov/test.yml | 8 +- tests/modules/bedtools/getfasta/main.nf | 2 +- .../modules/bedtools/getfasta/nextflow.config | 5 + tests/modules/bedtools/getfasta/test.yml | 2 +- tests/modules/bedtools/intersect/main.nf | 2 +- .../bedtools/intersect/nextflow.config | 9 ++ tests/modules/bedtools/intersect/test.yml | 4 +- tests/modules/bedtools/makewindows/main.nf | 11 +- .../bedtools/makewindows/nextflow.config | 9 ++ tests/modules/bedtools/makewindows/test.yml | 2 +- tests/modules/bedtools/maskfasta/main.nf | 2 +- .../bedtools/maskfasta/nextflow.config | 5 + tests/modules/bedtools/maskfasta/test.yml | 2 +- tests/modules/bedtools/merge/main.nf | 2 +- tests/modules/bedtools/merge/nextflow.config | 9 ++ 
tests/modules/bedtools/merge/test.yml | 2 +- tests/modules/bedtools/slop/main.nf | 2 +- tests/modules/bedtools/slop/nextflow.config | 10 ++ tests/modules/bedtools/slop/test.yml | 2 +- tests/modules/bedtools/sort/main.nf | 2 +- tests/modules/bedtools/sort/nextflow.config | 9 ++ tests/modules/bedtools/sort/test.yml | 2 +- tests/modules/bedtools/subtract/main.nf | 2 +- .../modules/bedtools/subtract/nextflow.config | 5 + tests/modules/bedtools/subtract/test.yml | 2 +- tests/modules/bismark/align/main.nf | 6 +- tests/modules/bismark/align/nextflow.config | 5 + tests/modules/bismark/align/test.yml | 12 +- tests/modules/bismark/deduplicate/main.nf | 2 +- .../bismark/deduplicate/nextflow.config | 5 + tests/modules/bismark/deduplicate/test.yml | 2 +- .../modules/bismark/genomepreparation/main.nf | 2 +- .../bismark/genomepreparation/nextflow.config | 5 + .../bismark/genomepreparation/test.yml | 2 +- .../bismark/methylationextractor/main.nf | 4 +- .../methylationextractor/nextflow.config | 5 + .../bismark/methylationextractor/test.yml | 2 +- tests/modules/bismark/report/main.nf | 10 +- tests/modules/bismark/report/nextflow.config | 5 + tests/modules/bismark/report/test.yml | 2 +- tests/modules/bismark/summary/main.nf | 10 +- tests/modules/bismark/summary/nextflow.config | 5 + tests/modules/bismark/summary/test.yml | 2 +- tests/modules/blast/blastn/main.nf | 4 +- tests/modules/blast/blastn/nextflow.config | 9 ++ tests/modules/blast/blastn/test.yml | 2 +- tests/modules/blast/makeblastdb/main.nf | 2 +- .../modules/blast/makeblastdb/nextflow.config | 9 ++ tests/modules/blast/makeblastdb/test.yml | 2 +- tests/modules/bowtie/align/main.nf | 24 ++-- tests/modules/bowtie/align/nextflow.config | 5 + tests/modules/bowtie/align/test.yml | 28 ++-- tests/modules/bowtie/build_test/main.nf | 2 +- .../modules/bowtie/build_test/nextflow.config | 5 + tests/modules/bowtie/build_test/test.yml | 2 +- tests/modules/bowtie2/align/main.nf | 25 ++-- tests/modules/bowtie2/align/nextflow.config | 5 + tests/modules/bowtie2/align/test.yml | 28 ++-- tests/modules/bowtie2/build_test/main.nf | 2 +- .../bowtie2/build_test/nextflow.config | 5 + tests/modules/bowtie2/build_test/test.yml | 2 +- tests/modules/bwa/aln/main.nf | 24 ++-- tests/modules/bwa/aln/nextflow.config | 5 + tests/modules/bwa/aln/test.yml | 24 ++-- tests/modules/bwa/index/main.nf | 2 +- tests/modules/bwa/index/nextflow.config | 5 + tests/modules/bwa/index/test.yml | 2 +- tests/modules/bwa/mem/main.nf | 24 ++-- tests/modules/bwa/mem/nextflow.config | 5 + tests/modules/bwa/mem/test.yml | 24 ++-- tests/modules/bwa/sampe/main.nf | 6 +- tests/modules/bwa/sampe/nextflow.config | 5 + tests/modules/bwa/sampe/test.yml | 2 +- tests/modules/bwa/samse/main.nf | 6 +- tests/modules/bwa/samse/nextflow.config | 5 + tests/modules/bwa/samse/test.yml | 2 +- tests/modules/bwamem2/index/main.nf | 2 +- tests/modules/bwamem2/index/nextflow.config | 5 + tests/modules/bwamem2/index/test.yml | 2 +- tests/modules/bwamem2/mem/main.nf | 24 ++-- tests/modules/bwamem2/mem/nextflow.config | 5 + tests/modules/bwamem2/mem/test.yml | 24 ++-- tests/modules/bwameth/align/main.nf | 24 ++-- tests/modules/bwameth/align/nextflow.config | 5 + tests/modules/bwameth/align/test.yml | 4 +- tests/modules/bwameth/index/main.nf | 2 +- tests/modules/bwameth/index/nextflow.config | 5 + tests/modules/bwameth/index/test.yml | 2 +- tests/modules/cat/cat/main.nf | 2 +- tests/modules/cat/cat/nextflow.config | 5 + tests/modules/cat/cat/test.yml | 8 +- tests/modules/cat/fastq/main.nf | 2 +- 
tests/modules/cat/fastq/nextflow.config | 5 + tests/modules/cat/fastq/test.yml | 4 +- tests/modules/cellranger/mkref/main.nf | 2 +- .../modules/cellranger/mkref/nextflow.config | 5 + tests/modules/cellranger/mkref/test.yml | 2 +- tests/modules/checkm/lineagewf/main.nf | 2 +- .../modules/checkm/lineagewf/nextflow.config | 5 + tests/modules/checkm/lineagewf/test.yml | 4 +- tests/modules/chromap/chromap/main.nf | 19 ++- tests/modules/chromap/chromap/nextflow.config | 9 ++ tests/modules/chromap/chromap/test.yml | 6 +- tests/modules/chromap/index/main.nf | 2 +- tests/modules/chromap/index/nextflow.config | 5 + tests/modules/chromap/index/test.yml | 2 +- tests/modules/clonalframeml/main.nf | 12 +- tests/modules/clonalframeml/nextflow.config | 5 + tests/modules/clonalframeml/test.yml | 2 +- tests/modules/cmseq/polymut/main.nf | 2 +- tests/modules/cmseq/polymut/nextflow.config | 5 + tests/modules/cmseq/polymut/test.yml | 6 +- tests/modules/cnvkit/batch/main.nf | 61 ++++---- tests/modules/cnvkit/batch/nextflow.config | 17 +++ tests/modules/cnvkit/batch/test.yml | 8 +- tests/modules/cooler/cload/main.nf | 12 +- tests/modules/cooler/cload/nextflow.config | 17 +++ tests/modules/cooler/cload/test.yml | 6 +- tests/modules/cooler/digest/main.nf | 2 +- tests/modules/cooler/digest/nextflow.config | 5 + tests/modules/cooler/digest/test.yml | 2 +- tests/modules/cooler/dump/main.nf | 2 +- tests/modules/cooler/dump/nextflow.config | 5 + tests/modules/cooler/dump/test.yml | 2 +- tests/modules/cooler/merge/main.nf | 18 ++- tests/modules/cooler/merge/nextflow.config | 5 + tests/modules/cooler/merge/test.yml | 2 +- tests/modules/cooler/zoomify/main.nf | 11 +- tests/modules/cooler/zoomify/nextflow.config | 9 ++ tests/modules/cooler/zoomify/test.yml | 4 +- tests/modules/csvtk/concat/main.nf | 2 +- tests/modules/csvtk/concat/nextflow.config | 5 + tests/modules/csvtk/concat/test.yml | 2 +- tests/modules/csvtk/split/main.nf | 2 +- tests/modules/csvtk/split/nextflow.config | 9 ++ tests/modules/csvtk/split/test.yml | 4 +- .../custom/dumpsoftwareversions/main.nf | 51 +++++-- .../dumpsoftwareversions/nextflow.config | 5 + .../custom/dumpsoftwareversions/test.yml | 8 +- tests/modules/custom/getchromsizes/main.nf | 2 +- .../custom/getchromsizes/nextflow.config | 5 + tests/modules/custom/getchromsizes/test.yml | 2 +- tests/modules/cutadapt/main.nf | 2 +- tests/modules/cutadapt/nextflow.config | 9 ++ tests/modules/cutadapt/test.yml | 4 +- tests/modules/damageprofiler/main.nf | 2 +- tests/modules/damageprofiler/nextflow.config | 5 + tests/modules/damageprofiler/test.yml | 6 +- tests/modules/dastool/dastool/main.nf | 8 +- tests/modules/dastool/dastool/nextflow.config | 13 ++ tests/modules/dastool/dastool/test.yml | 2 +- tests/modules/dastool/scaffolds2bin/main.nf | 6 +- .../dastool/scaffolds2bin/nextflow.config | 9 ++ tests/modules/dastool/scaffolds2bin/test.yml | 2 +- tests/modules/dedup/main.nf | 2 +- tests/modules/dedup/nextflow.config | 9 ++ tests/modules/dedup/test.yml | 2 +- tests/modules/deeptools/computematrix/main.nf | 2 +- .../deeptools/computematrix/nextflow.config | 9 ++ .../modules/deeptools/computematrix/test.yml | 2 +- .../modules/deeptools/plotfingerprint/main.nf | 2 +- .../deeptools/plotfingerprint/nextflow.config | 5 + .../deeptools/plotfingerprint/test.yml | 2 +- tests/modules/deeptools/plotheatmap/main.nf | 2 +- .../deeptools/plotheatmap/nextflow.config | 5 + tests/modules/deeptools/plotheatmap/test.yml | 2 +- tests/modules/deeptools/plotprofile/main.nf | 2 +- .../deeptools/plotprofile/nextflow.config | 5 + 
tests/modules/deeptools/plotprofile/test.yml | 2 +- tests/modules/delly/call/main.nf | 2 +- tests/modules/delly/call/nextflow.config | 5 + tests/modules/delly/call/test.yml | 3 +- tests/modules/diamond/blastp/main.nf | 4 +- tests/modules/diamond/blastp/nextflow.config | 9 ++ tests/modules/diamond/blastp/test.yml | 2 +- tests/modules/diamond/blastx/main.nf | 4 +- tests/modules/diamond/blastx/nextflow.config | 9 ++ tests/modules/diamond/blastx/test.yml | 2 +- tests/modules/diamond/makedb/main.nf | 2 +- tests/modules/diamond/makedb/nextflow.config | 5 + tests/modules/diamond/makedb/test.yml | 2 +- tests/modules/dragonflye/main.nf | 4 +- tests/modules/dragonflye/nextflow.config | 13 ++ tests/modules/dragonflye/test.yml | 4 +- tests/modules/dshbio/exportsegments/main.nf | 2 +- .../dshbio/exportsegments/nextflow.config | 5 + tests/modules/dshbio/exportsegments/test.yml | 2 +- tests/modules/dshbio/filterbed/main.nf | 2 +- .../modules/dshbio/filterbed/nextflow.config | 9 ++ tests/modules/dshbio/filterbed/test.yml | 2 +- tests/modules/dshbio/filtergff3/main.nf | 2 +- .../modules/dshbio/filtergff3/nextflow.config | 10 ++ tests/modules/dshbio/filtergff3/test.yml | 2 +- tests/modules/dshbio/splitbed/main.nf | 2 +- tests/modules/dshbio/splitbed/nextflow.config | 10 ++ tests/modules/dshbio/splitbed/test.yml | 2 +- tests/modules/dshbio/splitgff3/main.nf | 2 +- .../modules/dshbio/splitgff3/nextflow.config | 10 ++ tests/modules/dshbio/splitgff3/test.yml | 2 +- tests/modules/ectyper/main.nf | 10 +- tests/modules/ectyper/nextflow.config | 5 + tests/modules/ectyper/test.yml | 2 +- tests/modules/emmtyper/main.nf | 2 +- tests/modules/emmtyper/nextflow.config | 5 + tests/modules/emmtyper/test.yml | 2 +- tests/modules/ensemblvep/main.nf | 10 +- tests/modules/ensemblvep/nextflow.config | 10 ++ tests/modules/ensemblvep/test.yml | 2 +- tests/modules/expansionhunter/main.nf | 2 +- tests/modules/expansionhunter/nextflow.config | 5 + tests/modules/expansionhunter/test.yml | 2 +- tests/modules/fargene/main.nf | 2 +- tests/modules/fargene/nextflow.config | 5 + tests/modules/fargene/test.yml | 2 +- tests/modules/fastani/main.nf | 2 +- tests/modules/fastani/nextflow.config | 5 + tests/modules/fastani/test.yml | 2 +- tests/modules/fastp/main.nf | 2 +- tests/modules/fastp/nextflow.config | 5 + tests/modules/fastp/test.yml | 10 +- tests/modules/fastqc/main.nf | 2 +- tests/modules/fastqc/nextflow.config | 5 + tests/modules/fastqc/test.yml | 4 +- tests/modules/fastqscan/main.nf | 2 +- tests/modules/fastqscan/nextflow.config | 9 ++ tests/modules/fastqscan/test.yml | 2 +- tests/modules/fasttree/main.nf | 2 +- tests/modules/fasttree/nextflow.config | 5 + tests/modules/fasttree/test.yml | 2 +- .../fgbio/callmolecularconsensusreads/main.nf | 4 +- .../nextflow.config | 15 ++ .../callmolecularconsensusreads/test.yml | 2 +- tests/modules/fgbio/fastqtobam/main.nf | 17 ++- .../modules/fgbio/fastqtobam/nextflow.config | 5 + tests/modules/fgbio/fastqtobam/test.yml | 4 +- tests/modules/fgbio/groupreadsbyumi/main.nf | 9 +- .../fgbio/groupreadsbyumi/nextflow.config | 5 + tests/modules/fgbio/groupreadsbyumi/test.yml | 2 +- tests/modules/fgbio/sortbam/main.nf | 2 +- tests/modules/fgbio/sortbam/nextflow.config | 5 + tests/modules/fgbio/sortbam/test.yml | 2 +- tests/modules/filtlong/main.nf | 2 +- tests/modules/filtlong/nextflow.config | 5 + tests/modules/filtlong/test.yml | 6 +- tests/modules/flash/main.nf | 2 +- tests/modules/flash/nextflow.config | 9 ++ tests/modules/flash/test.yml | 2 +- tests/modules/freebayes/main.nf | 2 +- 
tests/modules/freebayes/nextflow.config | 5 + tests/modules/freebayes/test.yml | 10 +- tests/modules/gatk4/applybqsr/main.nf | 2 +- tests/modules/gatk4/applybqsr/nextflow.config | 5 + tests/modules/gatk4/applybqsr/test.yml | 6 +- tests/modules/gatk4/baserecalibrator/main.nf | 2 +- .../gatk4/baserecalibrator/nextflow.config | 5 + tests/modules/gatk4/baserecalibrator/test.yml | 8 +- tests/modules/gatk4/bedtointervallist/main.nf | 2 +- .../gatk4/bedtointervallist/nextflow.config | 5 + .../modules/gatk4/bedtointervallist/test.yml | 2 +- .../gatk4/calculatecontamination/main.nf | 2 +- .../calculatecontamination/nextflow.config | 5 + .../gatk4/calculatecontamination/test.yml | 6 +- .../gatk4/createsequencedictionary/main.nf | 2 +- .../createsequencedictionary/nextflow.config | 5 + .../gatk4/createsequencedictionary/test.yml | 2 +- .../gatk4/createsomaticpanelofnormals/main.nf | 4 +- .../nextflow.config | 9 ++ .../createsomaticpanelofnormals/test.yml | 2 +- .../gatk4/estimatelibrarycomplexity/main.nf | 2 +- .../estimatelibrarycomplexity/nextflow.config | 5 + .../gatk4/estimatelibrarycomplexity/test.yml | 2 +- tests/modules/gatk4/fastqtosam/main.nf | 2 +- .../modules/gatk4/fastqtosam/nextflow.config | 5 + tests/modules/gatk4/fastqtosam/test.yml | 4 +- tests/modules/gatk4/filtermutectcalls/main.nf | 2 +- .../gatk4/filtermutectcalls/nextflow.config | 9 ++ .../modules/gatk4/filtermutectcalls/test.yml | 6 +- tests/modules/gatk4/genomicsdbimport/main.nf | 4 +- .../gatk4/genomicsdbimport/nextflow.config | 5 + tests/modules/gatk4/genomicsdbimport/test.yml | 6 +- tests/modules/gatk4/genotypegvcfs/main.nf | 4 +- .../gatk4/genotypegvcfs/nextflow.config | 9 ++ tests/modules/gatk4/genotypegvcfs/test.yml | 18 +-- .../modules/gatk4/getpileupsummaries/main.nf | 2 +- .../gatk4/getpileupsummaries/nextflow.config | 5 + .../modules/gatk4/getpileupsummaries/test.yml | 4 +- tests/modules/gatk4/haplotypecaller/main.nf | 2 +- .../gatk4/haplotypecaller/nextflow.config | 5 + tests/modules/gatk4/haplotypecaller/test.yml | 6 +- tests/modules/gatk4/indexfeaturefile/main.nf | 2 +- .../gatk4/indexfeaturefile/nextflow.config | 5 + tests/modules/gatk4/indexfeaturefile/test.yml | 8 +- tests/modules/gatk4/intervallisttools/main.nf | 12 +- .../gatk4/intervallisttools/nextflow.config | 9 ++ .../modules/gatk4/intervallisttools/test.yml | 10 +- .../gatk4/learnreadorientationmodel/main.nf | 2 +- .../learnreadorientationmodel/nextflow.config | 9 ++ .../gatk4/learnreadorientationmodel/test.yml | 2 +- tests/modules/gatk4/markduplicates/main.nf | 2 +- .../gatk4/markduplicates/nextflow.config | 5 + tests/modules/gatk4/markduplicates/test.yml | 4 +- tests/modules/gatk4/mergebamalignment/main.nf | 2 +- .../gatk4/mergebamalignment/nextflow.config | 5 + .../modules/gatk4/mergebamalignment/test.yml | 2 +- tests/modules/gatk4/mergevcfs/main.nf | 2 +- tests/modules/gatk4/mergevcfs/nextflow.config | 5 + tests/modules/gatk4/mergevcfs/test.yml | 4 +- tests/modules/gatk4/mutect2/main.nf | 4 +- tests/modules/gatk4/mutect2/nextflow.config | 9 ++ tests/modules/gatk4/mutect2/test.yml | 10 +- tests/modules/gatk4/revertsam/main.nf | 2 +- tests/modules/gatk4/revertsam/nextflow.config | 5 + tests/modules/gatk4/revertsam/test.yml | 2 +- tests/modules/gatk4/samtofastq/main.nf | 2 +- .../modules/gatk4/samtofastq/nextflow.config | 5 + tests/modules/gatk4/samtofastq/test.yml | 4 +- tests/modules/gatk4/splitncigarreads/main.nf | 2 +- .../gatk4/splitncigarreads/nextflow.config | 5 + tests/modules/gatk4/splitncigarreads/test.yml | 2 +- 
tests/modules/gatk4/variantfiltration/main.nf | 35 ++--- .../gatk4/variantfiltration/nextflow.config | 10 ++ .../modules/gatk4/variantfiltration/test.yml | 4 +- tests/modules/genmap/index/main.nf | 2 +- tests/modules/genmap/index/nextflow.config | 5 + tests/modules/genmap/index/test.yml | 2 +- tests/modules/genmap/mappability/main.nf | 4 +- .../genmap/mappability/nextflow.config | 9 ++ tests/modules/genmap/mappability/test.yml | 2 +- tests/modules/genrich/main.nf | 8 +- tests/modules/genrich/nextflow.config | 21 +++ tests/modules/genrich/test.yml | 10 +- tests/modules/gffread/main.nf | 2 +- tests/modules/gffread/nextflow.config | 9 ++ tests/modules/gffread/test.yml | 2 +- tests/modules/glnexus/main.nf | 14 +- tests/modules/glnexus/nextflow.config | 5 + tests/modules/glnexus/test.yml | 4 +- tests/modules/graphmap2/align/main.nf | 4 +- tests/modules/graphmap2/align/nextflow.config | 5 + tests/modules/graphmap2/align/test.yml | 2 +- tests/modules/graphmap2/index/main.nf | 2 +- tests/modules/graphmap2/index/nextflow.config | 5 + tests/modules/graphmap2/index/test.yml | 2 +- tests/modules/gstama/collapse/main.nf | 2 +- tests/modules/gstama/collapse/nextflow.config | 10 ++ tests/modules/gstama/collapse/test.yml | 2 +- tests/modules/gstama/merge/main.nf | 2 +- tests/modules/gstama/merge/nextflow.config | 9 ++ tests/modules/gstama/merge/test.yml | 2 +- tests/modules/gtdbtk/classifywf/main.nf | 2 +- .../modules/gtdbtk/classifywf/nextflow.config | 5 + tests/modules/gtdbtk/classifywf/test.yml | 2 +- tests/modules/gubbins/main.nf | 2 +- tests/modules/gubbins/nextflow.config | 5 + tests/modules/gubbins/test.yml | 2 +- tests/modules/gunc/downloaddb/main.nf | 2 +- tests/modules/gunc/downloaddb/nextflow.config | 5 + tests/modules/gunc/downloaddb/test.yml | 2 +- tests/modules/gunc/run/main.nf | 14 +- tests/modules/gunc/run/nextflow.config | 5 + tests/modules/gunc/run/test.yml | 2 +- tests/modules/gunzip/main.nf | 2 +- tests/modules/gunzip/nextflow.config | 5 + tests/modules/gunzip/test.yml | 2 +- tests/modules/hicap/main.nf | 11 +- tests/modules/hicap/nextflow.config | 5 + tests/modules/hicap/test.yml | 8 +- tests/modules/hifiasm/main.nf | 2 +- tests/modules/hifiasm/nextflow.config | 9 ++ tests/modules/hifiasm/test.yml | 4 +- tests/modules/hisat2/align/main.nf | 26 ++-- tests/modules/hisat2/align/nextflow.config | 5 + tests/modules/hisat2/align/test.yml | 36 ++--- tests/modules/hisat2/build_test/main.nf | 4 +- .../modules/hisat2/build_test/nextflow.config | 5 + tests/modules/hisat2/build_test/test.yml | 18 +-- .../modules/hisat2/extractsplicesites/main.nf | 2 +- .../hisat2/extractsplicesites/nextflow.config | 5 + .../hisat2/extractsplicesites/test.yml | 2 +- tests/modules/hmmcopy/gccounter/main.nf | 2 +- .../modules/hmmcopy/gccounter/nextflow.config | 5 + tests/modules/hmmcopy/gccounter/test.yml | 2 +- tests/modules/hmmcopy/readcounter/main.nf | 2 +- .../hmmcopy/readcounter/nextflow.config | 5 + tests/modules/hmmcopy/readcounter/test.yml | 2 +- tests/modules/hmmer/hmmalign/main.nf | 2 +- tests/modules/hmmer/hmmalign/nextflow.config | 5 + tests/modules/hmmer/hmmalign/test.yml | 2 +- tests/modules/homer/annotatepeaks/main.nf | 2 +- .../homer/annotatepeaks/nextflow.config | 5 + tests/modules/homer/annotatepeaks/test.yml | 2 +- tests/modules/homer/findpeaks/main.nf | 4 +- tests/modules/homer/findpeaks/nextflow.config | 13 ++ tests/modules/homer/findpeaks/test.yml | 2 +- tests/modules/homer/maketagdirectory/main.nf | 2 +- .../homer/maketagdirectory/nextflow.config | 9 ++ 
tests/modules/homer/maketagdirectory/test.yml | 4 +- tests/modules/homer/makeucscfile/main.nf | 4 +- .../homer/makeucscfile/nextflow.config | 9 ++ tests/modules/homer/makeucscfile/test.yml | 2 +- tests/modules/idr/main.nf | 2 +- tests/modules/idr/nextflow.config | 5 + tests/modules/idr/test.yml | 6 +- tests/modules/imputeme/vcftoprs/main.nf | 2 +- .../modules/imputeme/vcftoprs/nextflow.config | 5 + tests/modules/imputeme/vcftoprs/test.yml | 2 +- tests/modules/iqtree/main.nf | 2 +- tests/modules/iqtree/nextflow.config | 5 + tests/modules/iqtree/test.yml | 2 +- tests/modules/ismapper/main.nf | 2 +- tests/modules/ismapper/nextflow.config | 5 + tests/modules/ismapper/test.yml | 2 +- tests/modules/isoseq3/cluster/main.nf | 2 +- tests/modules/isoseq3/cluster/nextflow.config | 9 ++ tests/modules/isoseq3/cluster/test.yml | 2 +- tests/modules/isoseq3/refine/main.nf | 2 +- tests/modules/isoseq3/refine/nextflow.config | 9 ++ tests/modules/isoseq3/refine/test.yml | 2 +- tests/modules/ivar/consensus/main.nf | 2 +- tests/modules/ivar/consensus/nextflow.config | 9 ++ tests/modules/ivar/consensus/test.yml | 2 +- tests/modules/ivar/trim/main.nf | 2 +- tests/modules/ivar/trim/nextflow.config | 5 + tests/modules/ivar/trim/test.yml | 2 +- tests/modules/ivar/variants/main.nf | 2 +- tests/modules/ivar/variants/nextflow.config | 5 + tests/modules/ivar/variants/test.yml | 6 +- tests/modules/jupyternotebook/main.nf | 12 +- tests/modules/jupyternotebook/nextflow.config | 19 +++ tests/modules/jupyternotebook/test.yml | 6 +- tests/modules/kallisto/index/main.nf | 2 +- tests/modules/kallisto/index/nextflow.config | 5 + tests/modules/kallisto/index/test.yml | 2 +- tests/modules/kallistobustools/count/main.nf | 2 +- .../kallistobustools/count/nextflow.config | 9 ++ tests/modules/kallistobustools/count/test.yml | 2 +- tests/modules/kallistobustools/ref/main.nf | 2 +- .../kallistobustools/ref/nextflow.config | 5 + tests/modules/kallistobustools/ref/test.yml | 6 +- tests/modules/khmer/normalizebymedian/main.nf | 6 +- .../khmer/normalizebymedian/nextflow.config | 9 ++ .../modules/khmer/normalizebymedian/test.yml | 10 +- tests/modules/kleborate/main.nf | 2 +- tests/modules/kleborate/nextflow.config | 5 + tests/modules/kleborate/test.yml | 2 +- tests/modules/kraken2/kraken2/main.nf | 4 +- tests/modules/kraken2/kraken2/nextflow.config | 5 + tests/modules/kraken2/kraken2/test.yml | 4 +- tests/modules/krona/kronadb/main.nf | 9 ++ tests/modules/krona/kronadb/nextflow.config | 5 + tests/modules/krona/kronadb/test.yml | 7 + tests/modules/krona/ktimporttaxonomy/main.nf | 16 +++ .../krona/ktimporttaxonomy/nextflow.config | 5 + tests/modules/krona/ktimporttaxonomy/test.yml | 9 ++ tests/modules/kronatools/kronadb/main.nf | 9 -- tests/modules/kronatools/kronadb/test.yml | 7 - .../kronatools/ktimporttaxonomy/main.nf | 15 -- .../kronatools/ktimporttaxonomy/test.yml | 9 -- tests/modules/last/dotplot/main.nf | 2 +- tests/modules/last/dotplot/nextflow.config | 5 + tests/modules/last/dotplot/test.yml | 2 +- tests/modules/last/lastal/main.nf | 4 +- tests/modules/last/lastal/nextflow.config | 5 + tests/modules/last/lastal/test.yml | 4 +- tests/modules/last/lastdb/main.nf | 2 +- tests/modules/last/lastdb/nextflow.config | 9 ++ tests/modules/last/lastdb/test.yml | 4 +- tests/modules/last/mafconvert/main.nf | 2 +- tests/modules/last/mafconvert/nextflow.config | 5 + tests/modules/last/mafconvert/test.yml | 2 +- tests/modules/last/mafswap/main.nf | 2 +- tests/modules/last/mafswap/nextflow.config | 5 + tests/modules/last/mafswap/test.yml | 2 +- 
tests/modules/last/postmask/main.nf | 2 +- tests/modules/last/postmask/nextflow.config | 9 ++ tests/modules/last/postmask/test.yml | 2 +- tests/modules/last/split/main.nf | 2 +- tests/modules/last/split/nextflow.config | 9 ++ tests/modules/last/split/test.yml | 2 +- tests/modules/last/train/main.nf | 4 +- tests/modules/last/train/nextflow.config | 5 + tests/modules/last/train/test.yml | 2 +- tests/modules/leehom/main.nf | 4 +- tests/modules/leehom/nextflow.config | 9 ++ tests/modules/leehom/test.yml | 10 +- tests/modules/lima/main.nf | 2 +- tests/modules/lima/nextflow.config | 10 ++ tests/modules/lima/test.yml | 10 +- tests/modules/lissero/main.nf | 2 +- tests/modules/lissero/nextflow.config | 5 + tests/modules/lissero/test.yml | 2 +- tests/modules/lofreq/call/main.nf | 2 +- tests/modules/lofreq/call/nextflow.config | 5 + tests/modules/lofreq/call/test.yml | 2 +- tests/modules/lofreq/callparallel/main.nf | 2 +- .../lofreq/callparallel/nextflow.config | 5 + tests/modules/lofreq/callparallel/test.yml | 2 +- tests/modules/lofreq/filter/main.nf | 2 +- tests/modules/lofreq/filter/nextflow.config | 5 + tests/modules/lofreq/filter/test.yml | 2 +- tests/modules/lofreq/indelqual/main.nf | 2 +- .../modules/lofreq/indelqual/nextflow.config | 10 ++ tests/modules/lofreq/indelqual/test.yml | 2 +- tests/modules/macs2/callpeak/main.nf | 6 +- tests/modules/macs2/callpeak/nextflow.config | 17 +++ tests/modules/macs2/callpeak/test.yml | 6 +- tests/modules/malt/build_test/main.nf | 4 +- tests/modules/malt/build_test/nextflow.config | 5 + tests/modules/malt/build_test/test.yml | 4 +- tests/modules/malt/run/main.nf | 6 +- tests/modules/malt/run/nextflow.config | 5 + tests/modules/malt/run/test.yml | 2 +- tests/modules/maltextract/main.nf | 10 +- tests/modules/maltextract/nextflow.config | 5 + tests/modules/maltextract/test.yml | 2 +- tests/modules/manta/germline/main.nf | 2 +- tests/modules/manta/germline/nextflow.config | 5 + tests/modules/manta/germline/test.yml | 4 +- tests/modules/manta/somatic/main.nf | 2 +- tests/modules/manta/somatic/nextflow.config | 5 + tests/modules/manta/somatic/test.yml | 2 +- tests/modules/manta/tumoronly/main.nf | 2 +- tests/modules/manta/tumoronly/nextflow.config | 5 + tests/modules/manta/tumoronly/test.yml | 4 +- tests/modules/mapdamage2/main.nf | 2 +- tests/modules/mapdamage2/nextflow.config | 5 + tests/modules/mapdamage2/test.yml | 2 +- tests/modules/mash/sketch/main.nf | 2 +- tests/modules/mash/sketch/nextflow.config | 5 + tests/modules/mash/sketch/test.yml | 2 +- tests/modules/mashtree/main.nf | 2 +- tests/modules/mashtree/nextflow.config | 5 + tests/modules/mashtree/test.yml | 2 +- tests/modules/maxbin2/main.nf | 2 +- tests/modules/maxbin2/nextflow.config | 5 + tests/modules/maxbin2/test.yml | 2 +- tests/modules/medaka/main.nf | 2 +- tests/modules/medaka/nextflow.config | 9 ++ tests/modules/medaka/test.yml | 2 +- tests/modules/megahit/main.nf | 2 +- tests/modules/megahit/nextflow.config | 5 + tests/modules/megahit/test.yml | 4 +- tests/modules/meningotype/main.nf | 2 +- tests/modules/meningotype/nextflow.config | 5 + tests/modules/meningotype/test.yml | 2 +- .../jgisummarizebamcontigdepths/main.nf | 2 +- .../nextflow.config | 5 + .../jgisummarizebamcontigdepths/test.yml | 2 +- tests/modules/metabat2/metabat2/main.nf | 4 +- .../modules/metabat2/metabat2/nextflow.config | 9 ++ tests/modules/metabat2/metabat2/test.yml | 4 +- tests/modules/metaphlan3/main.nf | 8 +- tests/modules/metaphlan3/nextflow.config | 13 ++ tests/modules/metaphlan3/test.yml | 8 +- 
tests/modules/methyldackel/extract/main.nf | 2 +- .../methyldackel/extract/nextflow.config | 5 + tests/modules/methyldackel/extract/test.yml | 2 +- tests/modules/methyldackel/mbias/main.nf | 2 +- .../methyldackel/mbias/nextflow.config | 5 + tests/modules/methyldackel/mbias/test.yml | 2 +- tests/modules/minia/main.nf | 2 +- tests/modules/minia/nextflow.config | 5 + tests/modules/minia/test.yml | 2 +- tests/modules/miniasm/main.nf | 2 +- tests/modules/miniasm/nextflow.config | 9 ++ tests/modules/miniasm/test.yml | 2 +- tests/modules/minimap2/align/main.nf | 2 +- tests/modules/minimap2/align/nextflow.config | 5 + tests/modules/minimap2/align/test.yml | 4 +- tests/modules/minimap2/index/main.nf | 2 +- tests/modules/minimap2/index/nextflow.config | 5 + tests/modules/minimap2/index/test.yml | 2 +- tests/modules/mlst/main.nf | 2 +- tests/modules/mlst/nextflow.config | 5 + tests/modules/mlst/test.yml | 2 +- tests/modules/mosdepth/main.nf | 2 +- tests/modules/mosdepth/nextflow.config | 5 + tests/modules/mosdepth/test.yml | 2 +- tests/modules/msisensor/msi/main.nf | 4 +- tests/modules/msisensor/msi/nextflow.config | 5 + tests/modules/msisensor/msi/test.yml | 2 +- tests/modules/msisensor/scan/main.nf | 2 +- tests/modules/msisensor/scan/nextflow.config | 5 + tests/modules/msisensor/scan/test.yml | 2 +- tests/modules/mtnucratio/main.nf | 2 +- tests/modules/mtnucratio/nextflow.config | 5 + tests/modules/mtnucratio/test.yml | 2 +- tests/modules/multiqc/main.nf | 4 +- tests/modules/multiqc/nextflow.config | 5 + tests/modules/multiqc/test.yml | 2 +- tests/modules/mummer/main.nf | 2 +- tests/modules/mummer/nextflow.config | 5 + tests/modules/mummer/test.yml | 2 +- tests/modules/muscle/main.nf | 4 +- tests/modules/muscle/nextflow.config | 13 ++ tests/modules/muscle/test.yml | 2 +- tests/modules/nanolyse/main.nf | 2 +- tests/modules/nanolyse/nextflow.config | 9 ++ tests/modules/nanolyse/test.yml | 2 +- tests/modules/nanoplot/main.nf | 2 +- tests/modules/nanoplot/nextflow.config | 5 + tests/modules/nanoplot/test.yml | 4 +- tests/modules/ncbigenomedownload/main.nf | 2 +- .../ncbigenomedownload/nextflow.config | 8 ++ tests/modules/ncbigenomedownload/test.yml | 2 +- tests/modules/nextclade/main.nf | 2 +- tests/modules/nextclade/nextflow.config | 5 + tests/modules/nextclade/test.yml | 2 +- tests/modules/ngmaster/main.nf | 2 +- tests/modules/ngmaster/nextflow.config | 5 + tests/modules/ngmaster/test.yml | 2 +- tests/modules/nucmer/main.nf | 2 +- tests/modules/nucmer/nextflow.config | 5 + tests/modules/nucmer/test.yml | 2 +- tests/modules/optitype/main.nf | 2 +- tests/modules/optitype/nextflow.config | 10 ++ tests/modules/optitype/test.yml | 2 +- tests/modules/pairix/main.nf | 2 +- tests/modules/pairix/nextflow.config | 5 + tests/modules/pairix/test.yml | 2 +- tests/modules/pairtools/dedup/main.nf | 2 +- tests/modules/pairtools/dedup/nextflow.config | 9 ++ tests/modules/pairtools/dedup/test.yml | 2 +- tests/modules/pairtools/flip/main.nf | 2 +- tests/modules/pairtools/flip/nextflow.config | 5 + tests/modules/pairtools/flip/test.yml | 2 +- tests/modules/pairtools/parse/main.nf | 2 +- tests/modules/pairtools/parse/nextflow.config | 9 ++ tests/modules/pairtools/parse/test.yml | 2 +- tests/modules/pairtools/restrict/main.nf | 2 +- .../pairtools/restrict/nextflow.config | 9 ++ tests/modules/pairtools/restrict/test.yml | 2 +- tests/modules/pairtools/select/main.nf | 2 +- .../modules/pairtools/select/nextflow.config | 9 ++ tests/modules/pairtools/select/test.yml | 2 +- tests/modules/pairtools/sort/main.nf | 2 +- 
tests/modules/pairtools/sort/nextflow.config | 9 ++ tests/modules/pairtools/sort/test.yml | 2 +- tests/modules/pangolin/main.nf | 2 +- tests/modules/pangolin/nextflow.config | 5 + tests/modules/pangolin/test.yml | 2 +- tests/modules/paraclu/main.nf | 2 +- tests/modules/paraclu/nextflow.config | 5 + tests/modules/paraclu/test.yml | 2 +- tests/modules/pbbam/pbmerge/main.nf | 2 +- tests/modules/pbbam/pbmerge/nextflow.config | 9 ++ tests/modules/pbbam/pbmerge/test.yml | 2 +- tests/modules/pbccs/main.nf | 2 +- tests/modules/pbccs/nextflow.config | 9 ++ tests/modules/pbccs/test.yml | 2 +- tests/modules/peddy/main.nf | 4 +- tests/modules/peddy/nextflow.config | 5 + tests/modules/peddy/test.yml | 2 +- tests/modules/phyloflash/main.nf | 12 +- tests/modules/phyloflash/nextflow.config | 5 + tests/modules/phyloflash/test.yml | 4 +- tests/modules/picard/collecthsmetrics/main.nf | 2 +- .../picard/collecthsmetrics/nextflow.config | 5 + .../modules/picard/collecthsmetrics/test.yml | 2 +- .../picard/collectmultiplemetrics/main.nf | 2 +- .../collectmultiplemetrics/nextflow.config | 5 + .../picard/collectmultiplemetrics/test.yml | 2 +- .../modules/picard/collectwgsmetrics/main.nf | 2 +- .../picard/collectwgsmetrics/nextflow.config | 5 + .../modules/picard/collectwgsmetrics/test.yml | 2 +- tests/modules/picard/filtersamreads/main.nf | 4 +- .../picard/filtersamreads/nextflow.config | 13 ++ tests/modules/picard/filtersamreads/test.yml | 4 +- tests/modules/picard/markduplicates/main.nf | 4 +- .../picard/markduplicates/nextflow.config | 9 ++ tests/modules/picard/markduplicates/test.yml | 4 +- tests/modules/picard/mergesamfiles/main.nf | 2 +- .../picard/mergesamfiles/nextflow.config | 5 + tests/modules/picard/mergesamfiles/test.yml | 2 +- tests/modules/picard/sortsam/main.nf | 2 +- tests/modules/picard/sortsam/nextflow.config | 9 ++ tests/modules/picard/sortsam/test.yml | 2 +- tests/modules/pirate/main.nf | 19 ++- tests/modules/pirate/nextflow.config | 5 + tests/modules/pirate/test.yml | 2 +- tests/modules/plasmidid/main.nf | 2 +- tests/modules/plasmidid/nextflow.config | 9 ++ tests/modules/plasmidid/test.yml | 2 +- tests/modules/plink/extract/main.nf | 4 +- tests/modules/plink/extract/nextflow.config | 13 ++ tests/modules/plink/extract/test.yml | 2 +- tests/modules/plink/vcf/main.nf | 2 +- tests/modules/plink/vcf/nextflow.config | 9 ++ tests/modules/plink/vcf/test.yml | 2 +- tests/modules/plink2/vcf/main.nf | 2 +- tests/modules/plink2/vcf/nextflow.config | 9 ++ tests/modules/plink2/vcf/test.yml | 2 +- tests/modules/pmdtools/filter/main.nf | 2 +- tests/modules/pmdtools/filter/nextflow.config | 5 + tests/modules/pmdtools/filter/test.yml | 2 +- tests/modules/porechop/main.nf | 2 +- tests/modules/porechop/nextflow.config | 10 ++ tests/modules/porechop/test.yml | 2 +- tests/modules/preseq/lcextrap/main.nf | 2 +- tests/modules/preseq/lcextrap/nextflow.config | 5 + tests/modules/preseq/lcextrap/test.yml | 4 +- tests/modules/prodigal/main.nf | 2 +- tests/modules/prodigal/nextflow.config | 5 + tests/modules/prodigal/test.yml | 2 +- tests/modules/prokka/main.nf | 2 +- tests/modules/prokka/nextflow.config | 5 + tests/modules/prokka/test.yml | 2 +- tests/modules/pycoqc/main.nf | 2 +- tests/modules/pycoqc/nextflow.config | 9 ++ tests/modules/pycoqc/test.yml | 2 +- tests/modules/pydamage/analyze/main.nf | 2 +- .../modules/pydamage/analyze/nextflow.config | 5 + tests/modules/pydamage/analyze/test.yml | 2 +- tests/modules/pydamage/filter/main.nf | 4 +- tests/modules/pydamage/filter/nextflow.config | 5 + 
tests/modules/pydamage/filter/test.yml | 4 +- tests/modules/qcat/main.nf | 2 +- tests/modules/qcat/nextflow.config | 5 + tests/modules/qcat/test.yml | 2 +- tests/modules/qualimap/bamqc/main.nf | 2 +- tests/modules/qualimap/bamqc/nextflow.config | 5 + tests/modules/qualimap/bamqc/test.yml | 2 +- tests/modules/quast/main.nf | 2 +- tests/modules/quast/nextflow.config | 5 + tests/modules/quast/test.yml | 4 +- tests/modules/racon/main.nf | 2 +- tests/modules/racon/nextflow.config | 5 + tests/modules/racon/test.yml | 2 +- tests/modules/rapidnj/main.nf | 2 +- tests/modules/rapidnj/nextflow.config | 5 + tests/modules/rapidnj/test.yml | 2 +- tests/modules/rasusa/main.nf | 2 +- tests/modules/rasusa/nextflow.config | 9 ++ tests/modules/rasusa/test.yml | 2 +- tests/modules/raxmlng/main.nf | 4 +- tests/modules/raxmlng/nextflow.config | 13 ++ tests/modules/raxmlng/test.yml | 4 +- tests/modules/rmarkdownnotebook/main.nf | 8 +- .../modules/rmarkdownnotebook/nextflow.config | 15 ++ tests/modules/rmarkdownnotebook/test.yml | 4 +- tests/modules/roary/main.nf | 15 +- tests/modules/roary/nextflow.config | 5 + tests/modules/roary/test.yml | 8 +- .../modules/rsem/calculateexpression/main.nf | 4 +- .../rsem/calculateexpression/nextflow.config | 13 ++ .../modules/rsem/calculateexpression/test.yml | 50 +++---- tests/modules/rsem/preparereference/main.nf | 2 +- .../rsem/preparereference/nextflow.config | 5 + tests/modules/rsem/preparereference/test.yml | 2 +- tests/modules/rseqc/bamstat/main.nf | 2 +- tests/modules/rseqc/bamstat/nextflow.config | 5 + tests/modules/rseqc/bamstat/test.yml | 2 +- tests/modules/rseqc/inferexperiment/main.nf | 2 +- .../rseqc/inferexperiment/nextflow.config | 5 + tests/modules/rseqc/inferexperiment/test.yml | 2 +- tests/modules/rseqc/innerdistance/main.nf | 2 +- .../rseqc/innerdistance/nextflow.config | 5 + tests/modules/rseqc/innerdistance/test.yml | 2 +- .../modules/rseqc/junctionannotation/main.nf | 2 +- .../rseqc/junctionannotation/nextflow.config | 5 + .../modules/rseqc/junctionannotation/test.yml | 2 +- .../modules/rseqc/junctionsaturation/main.nf | 2 +- .../rseqc/junctionsaturation/nextflow.config | 5 + .../modules/rseqc/junctionsaturation/test.yml | 2 +- tests/modules/rseqc/readdistribution/main.nf | 2 +- .../rseqc/readdistribution/nextflow.config | 5 + tests/modules/rseqc/readdistribution/test.yml | 2 +- tests/modules/rseqc/readduplication/main.nf | 2 +- .../rseqc/readduplication/nextflow.config | 5 + tests/modules/rseqc/readduplication/test.yml | 2 +- tests/modules/salmon/index/main.nf | 2 +- tests/modules/salmon/index/nextflow.config | 5 + tests/modules/salmon/index/test.yml | 2 +- tests/modules/salmon/quant/main.nf | 33 +++-- tests/modules/salmon/quant/nextflow.config | 9 ++ tests/modules/salmon/quant/test.yml | 92 ++++++------ tests/modules/samblaster/main.nf | 2 +- tests/modules/samblaster/nextflow.config | 10 ++ tests/modules/samblaster/test.yml | 2 +- tests/modules/samtools/ampliconclip/main.nf | 2 +- .../samtools/ampliconclip/nextflow.config | 5 + tests/modules/samtools/ampliconclip/test.yml | 8 +- tests/modules/samtools/bam2fq/main.nf | 2 +- tests/modules/samtools/bam2fq/nextflow.config | 9 ++ tests/modules/samtools/bam2fq/test.yml | 4 +- tests/modules/samtools/depth/main.nf | 2 +- tests/modules/samtools/depth/nextflow.config | 5 + tests/modules/samtools/depth/test.yml | 2 +- tests/modules/samtools/faidx/main.nf | 2 +- tests/modules/samtools/faidx/nextflow.config | 5 + tests/modules/samtools/faidx/test.yml | 2 +- tests/modules/samtools/fastq/main.nf | 2 +- 
tests/modules/samtools/fastq/nextflow.config | 5 + tests/modules/samtools/fastq/test.yml | 2 +- tests/modules/samtools/fixmate/main.nf | 2 +- .../modules/samtools/fixmate/nextflow.config | 9 ++ tests/modules/samtools/fixmate/test.yml | 2 +- tests/modules/samtools/flagstat/main.nf | 11 +- .../modules/samtools/flagstat/nextflow.config | 5 + tests/modules/samtools/flagstat/test.yml | 2 +- tests/modules/samtools/idxstats/main.nf | 2 +- .../modules/samtools/idxstats/nextflow.config | 5 + tests/modules/samtools/idxstats/test.yml | 2 +- tests/modules/samtools/index/main.nf | 6 +- tests/modules/samtools/index/nextflow.config | 9 ++ tests/modules/samtools/index/test.yml | 6 +- tests/modules/samtools/merge/main.nf | 2 +- tests/modules/samtools/merge/nextflow.config | 9 ++ tests/modules/samtools/merge/test.yml | 4 +- tests/modules/samtools/mpileup/main.nf | 2 +- .../modules/samtools/mpileup/nextflow.config | 5 + tests/modules/samtools/mpileup/test.yml | 2 +- tests/modules/samtools/sort/main.nf | 2 +- tests/modules/samtools/sort/nextflow.config | 9 ++ tests/modules/samtools/sort/test.yml | 2 +- tests/modules/samtools/stats/main.nf | 2 +- tests/modules/samtools/stats/nextflow.config | 5 + tests/modules/samtools/stats/test.yml | 4 +- tests/modules/samtools/view/main.nf | 2 +- tests/modules/samtools/view/nextflow.config | 5 + tests/modules/samtools/view/test.yml | 4 +- tests/modules/scoary/main.nf | 2 +- tests/modules/scoary/nextflow.config | 5 + tests/modules/scoary/test.yml | 2 +- tests/modules/seacr/callpeak/main.nf | 2 +- tests/modules/seacr/callpeak/nextflow.config | 9 ++ tests/modules/seacr/callpeak/test.yml | 4 +- tests/modules/seqkit/split2/main.nf | 6 +- tests/modules/seqkit/split2/nextflow.config | 17 +++ tests/modules/seqkit/split2/test.yml | 12 +- tests/modules/seqsero2/main.nf | 2 +- tests/modules/seqsero2/nextflow.config | 9 ++ tests/modules/seqsero2/test.yml | 2 +- tests/modules/seqtk/mergepe/main.nf | 2 +- tests/modules/seqtk/mergepe/nextflow.config | 9 ++ tests/modules/seqtk/mergepe/test.yml | 4 +- tests/modules/seqtk/sample/main.nf | 2 +- tests/modules/seqtk/sample/nextflow.config | 10 ++ tests/modules/seqtk/sample/test.yml | 4 +- tests/modules/seqtk/subseq/main.nf | 2 +- tests/modules/seqtk/subseq/nextflow.config | 9 ++ tests/modules/seqtk/subseq/test.yml | 2 +- tests/modules/sequenzautils/bam2seqz/main.nf | 2 +- .../sequenzautils/bam2seqz/nextflow.config | 5 + tests/modules/sequenzautils/bam2seqz/test.yml | 2 +- tests/modules/sequenzautils/gcwiggle/main.nf | 2 +- .../sequenzautils/gcwiggle/nextflow.config | 9 ++ tests/modules/sequenzautils/gcwiggle/test.yml | 4 +- tests/modules/seqwish/induce/main.nf | 2 +- tests/modules/seqwish/induce/nextflow.config | 5 + tests/modules/seqwish/induce/test.yml | 2 +- tests/modules/shovill/main.nf | 8 +- tests/modules/shovill/nextflow.config | 21 +++ tests/modules/shovill/test.yml | 8 +- tests/modules/snpdists/main.nf | 2 +- tests/modules/snpdists/nextflow.config | 5 + tests/modules/snpdists/test.yml | 2 +- tests/modules/snpeff/main.nf | 10 +- tests/modules/snpeff/nextflow.config | 10 ++ tests/modules/snpeff/test.yml | 2 +- tests/modules/snpsites/main.nf | 2 +- tests/modules/snpsites/nextflow.config | 5 + tests/modules/snpsites/test.yml | 2 +- tests/modules/spades/main.nf | 2 +- tests/modules/spades/nextflow.config | 9 ++ tests/modules/spades/test.yml | 8 +- tests/modules/spatyper/main.nf | 4 +- tests/modules/spatyper/nextflow.config | 9 ++ tests/modules/spatyper/test.yml | 4 +- tests/modules/sratools/fasterqdump/main.nf | 2 +- 
.../sratools/fasterqdump/nextflow.config | 5 + tests/modules/sratools/fasterqdump/test.yml | 10 +- tests/modules/sratools/prefetch/main.nf | 2 +- .../modules/sratools/prefetch/nextflow.config | 5 + tests/modules/sratools/prefetch/test.yml | 2 +- tests/modules/staphopiasccmec/main.nf | 4 +- tests/modules/staphopiasccmec/nextflow.config | 9 ++ tests/modules/staphopiasccmec/test.yml | 4 +- tests/modules/star/align/main.nf | 72 +++++++--- tests/modules/star/align/nextflow.config | 21 +++ tests/modules/star/align/test.yml | 136 +++++++++--------- tests/modules/star/genomegenerate/main.nf | 2 +- .../star/genomegenerate/nextflow.config | 5 + tests/modules/star/genomegenerate/test.yml | 2 +- tests/modules/strelka/germline/main.nf | 2 +- .../modules/strelka/germline/nextflow.config | 5 + tests/modules/strelka/germline/test.yml | 4 +- tests/modules/strelka/somatic/main.nf | 2 +- tests/modules/strelka/somatic/nextflow.config | 5 + tests/modules/strelka/somatic/test.yml | 4 +- tests/modules/stringtie/merge/main.nf | 4 +- tests/modules/stringtie/merge/nextflow.config | 5 + tests/modules/stringtie/merge/test.yml | 4 +- tests/modules/stringtie/stringtie/main.nf | 2 +- .../stringtie/stringtie/nextflow.config | 5 + tests/modules/stringtie/stringtie/test.yml | 4 +- tests/modules/subread/featurecounts/main.nf | 2 +- .../subread/featurecounts/nextflow.config | 9 ++ tests/modules/subread/featurecounts/test.yml | 6 +- tests/modules/tabix/bgzip/main.nf | 2 +- tests/modules/tabix/bgzip/nextflow.config | 5 + tests/modules/tabix/bgzip/test.yml | 2 +- tests/modules/tabix/bgziptabix/main.nf | 2 +- .../modules/tabix/bgziptabix/nextflow.config | 9 ++ tests/modules/tabix/bgziptabix/test.yml | 2 +- tests/modules/tabix/tabix/main.nf | 6 +- tests/modules/tabix/tabix/nextflow.config | 17 +++ tests/modules/tabix/tabix/test.yml | 6 +- tests/modules/tbprofiler/profile/main.nf | 28 ++-- .../tbprofiler/profile/nextflow.config | 13 ++ tests/modules/tbprofiler/profile/test.yml | 4 +- tests/modules/tiddit/cov/main.nf | 2 +- tests/modules/tiddit/cov/nextflow.config | 5 + tests/modules/tiddit/cov/test.yml | 4 +- tests/modules/tiddit/sv/main.nf | 2 +- tests/modules/tiddit/sv/nextflow.config | 5 + tests/modules/tiddit/sv/test.yml | 4 +- tests/modules/trimgalore/main.nf | 2 +- tests/modules/trimgalore/nextflow.config | 5 + tests/modules/trimgalore/test.yml | 4 +- tests/modules/ucsc/bed12tobigbed/main.nf | 2 +- .../ucsc/bed12tobigbed/nextflow.config | 5 + tests/modules/ucsc/bed12tobigbed/test.yml | 2 +- tests/modules/ucsc/bedclip/main.nf | 2 +- tests/modules/ucsc/bedclip/nextflow.config | 9 ++ tests/modules/ucsc/bedclip/test.yml | 2 +- tests/modules/ucsc/bedgraphtobigwig/main.nf | 2 +- .../ucsc/bedgraphtobigwig/nextflow.config | 5 + tests/modules/ucsc/bedgraphtobigwig/test.yml | 2 +- .../modules/ucsc/bigwigaverageoverbed/main.nf | 2 +- .../ucsc/bigwigaverageoverbed/nextflow.config | 5 + .../ucsc/bigwigaverageoverbed/test.yml | 2 +- tests/modules/ucsc/liftover/main.nf | 2 +- tests/modules/ucsc/liftover/nextflow.config | 5 + tests/modules/ucsc/liftover/test.yml | 2 +- tests/modules/ucsc/wigtobigwig/main.nf | 2 +- .../modules/ucsc/wigtobigwig/nextflow.config | 5 + tests/modules/ucsc/wigtobigwig/test.yml | 2 +- tests/modules/ultra/pipeline/main.nf | 6 +- tests/modules/ultra/pipeline/nextflow.config | 10 ++ tests/modules/ultra/pipeline/test.yml | 2 +- tests/modules/unicycler/main.nf | 2 +- tests/modules/unicycler/nextflow.config | 5 + tests/modules/unicycler/test.yml | 6 +- tests/modules/untar/main.nf | 2 +- 
tests/modules/untar/nextflow.config | 5 + tests/modules/untar/test.yml | 2 +- tests/modules/unzip/main.nf | 2 +- tests/modules/unzip/nextflow.config | 5 + tests/modules/unzip/test.yml | 2 +- tests/modules/variantbam/main.nf | 2 +- tests/modules/variantbam/nextflow.config | 9 ++ tests/modules/variantbam/test.yml | 2 +- tests/modules/vcftools/main.nf | 4 +- tests/modules/vcftools/nextflow.config | 13 ++ tests/modules/vcftools/test.yml | 8 +- tests/modules/yara/index/main.nf | 2 +- tests/modules/yara/index/nextflow.config | 5 + tests/modules/yara/index/test.yml | 2 +- tests/modules/yara/mapper/main.nf | 24 ++-- tests/modules/yara/mapper/nextflow.config | 13 ++ tests/modules/yara/mapper/test.yml | 56 ++++---- .../nf-core/align_bowtie2/test.yml | 14 +- .../nf-core/bam_sort_samtools/test.yml | 22 +-- .../nf-core/bam_stats_samtools/test.yml | 24 ++-- .../nf-core/gatk_create_som_pon/test.yml | 5 +- .../test.yml | 3 +- .../test.yml | 3 +- tests/subworkflows/nf-core/sra_fastq/test.yml | 18 +-- 1803 files changed, 7649 insertions(+), 36540 deletions(-) delete mode 100644 modules/abacas/functions.nf delete mode 100644 modules/adapterremoval/functions.nf delete mode 100644 modules/agrvate/functions.nf delete mode 100644 modules/allelecounter/functions.nf delete mode 100644 modules/amps/functions.nf delete mode 100644 modules/arriba/functions.nf delete mode 100644 modules/artic/guppyplex/functions.nf delete mode 100644 modules/artic/minion/functions.nf delete mode 100644 modules/assemblyscan/functions.nf delete mode 100644 modules/ataqv/ataqv/functions.nf delete mode 100644 modules/bakta/functions.nf delete mode 100644 modules/bamaligncleaner/functions.nf delete mode 100644 modules/bamtools/split/functions.nf delete mode 100644 modules/bamutil/trimbam/functions.nf delete mode 100644 modules/bandage/image/functions.nf delete mode 100644 modules/bbmap/align/functions.nf delete mode 100644 modules/bbmap/bbduk/functions.nf delete mode 100644 modules/bbmap/bbsplit/functions.nf delete mode 100644 modules/bbmap/index/functions.nf delete mode 100644 modules/bcftools/concat/functions.nf delete mode 100644 modules/bcftools/consensus/functions.nf delete mode 100644 modules/bcftools/filter/functions.nf delete mode 100644 modules/bcftools/index/functions.nf delete mode 100644 modules/bcftools/isec/functions.nf delete mode 100644 modules/bcftools/merge/functions.nf delete mode 100644 modules/bcftools/mpileup/functions.nf delete mode 100644 modules/bcftools/norm/functions.nf delete mode 100644 modules/bcftools/query/functions.nf delete mode 100644 modules/bcftools/reheader/functions.nf delete mode 100644 modules/bcftools/stats/functions.nf delete mode 100644 modules/bcftools/view/functions.nf delete mode 100644 modules/bedtools/bamtobed/functions.nf delete mode 100644 modules/bedtools/complement/functions.nf delete mode 100644 modules/bedtools/genomecov/functions.nf delete mode 100644 modules/bedtools/getfasta/functions.nf delete mode 100644 modules/bedtools/intersect/functions.nf delete mode 100644 modules/bedtools/makewindows/functions.nf delete mode 100644 modules/bedtools/maskfasta/functions.nf delete mode 100644 modules/bedtools/merge/functions.nf delete mode 100644 modules/bedtools/slop/functions.nf delete mode 100644 modules/bedtools/sort/functions.nf delete mode 100644 modules/bedtools/subtract/functions.nf delete mode 100644 modules/bismark/align/functions.nf delete mode 100644 modules/bismark/deduplicate/functions.nf delete mode 100644 modules/bismark/genomepreparation/functions.nf delete mode 100644 
modules/bismark/methylationextractor/functions.nf delete mode 100644 modules/bismark/report/functions.nf delete mode 100644 modules/bismark/summary/functions.nf delete mode 100644 modules/blast/blastn/functions.nf delete mode 100644 modules/blast/makeblastdb/functions.nf delete mode 100644 modules/bowtie/align/functions.nf delete mode 100644 modules/bowtie/build/functions.nf delete mode 100644 modules/bowtie2/align/functions.nf delete mode 100644 modules/bowtie2/build/functions.nf delete mode 100644 modules/bwa/aln/functions.nf delete mode 100644 modules/bwa/index/functions.nf delete mode 100644 modules/bwa/mem/functions.nf delete mode 100644 modules/bwa/sampe/functions.nf delete mode 100644 modules/bwa/samse/functions.nf delete mode 100644 modules/bwamem2/index/functions.nf delete mode 100644 modules/bwamem2/mem/functions.nf delete mode 100644 modules/bwameth/align/functions.nf delete mode 100644 modules/bwameth/index/functions.nf delete mode 100644 modules/cat/cat/functions.nf delete mode 100644 modules/cat/fastq/functions.nf delete mode 100644 modules/cellranger/mkref/functions.nf delete mode 100644 modules/checkm/lineagewf/functions.nf delete mode 100644 modules/chromap/chromap/functions.nf delete mode 100644 modules/chromap/index/functions.nf delete mode 100644 modules/clonalframeml/functions.nf delete mode 100644 modules/cmseq/polymut/functions.nf delete mode 100755 modules/cnvkit/batch/functions.nf delete mode 100644 modules/cooler/cload/functions.nf delete mode 100644 modules/cooler/digest/functions.nf delete mode 100644 modules/cooler/dump/functions.nf delete mode 100644 modules/cooler/merge/functions.nf delete mode 100644 modules/cooler/zoomify/functions.nf delete mode 100644 modules/csvtk/concat/functions.nf delete mode 100644 modules/csvtk/split/functions.nf delete mode 100644 modules/custom/dumpsoftwareversions/functions.nf create mode 100644 modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py delete mode 100644 modules/custom/getchromsizes/functions.nf delete mode 100644 modules/cutadapt/functions.nf delete mode 100644 modules/damageprofiler/functions.nf delete mode 100644 modules/dastool/dastool/functions.nf delete mode 100644 modules/dastool/scaffolds2bin/functions.nf delete mode 100644 modules/dedup/functions.nf delete mode 100644 modules/deeptools/computematrix/functions.nf delete mode 100644 modules/deeptools/plotfingerprint/functions.nf delete mode 100644 modules/deeptools/plotheatmap/functions.nf delete mode 100644 modules/deeptools/plotprofile/functions.nf delete mode 100644 modules/delly/call/functions.nf delete mode 100644 modules/diamond/blastp/functions.nf delete mode 100644 modules/diamond/blastx/functions.nf delete mode 100644 modules/diamond/makedb/functions.nf delete mode 100644 modules/dragonflye/functions.nf delete mode 100644 modules/dshbio/exportsegments/functions.nf delete mode 100644 modules/dshbio/filterbed/functions.nf delete mode 100644 modules/dshbio/filtergff3/functions.nf delete mode 100644 modules/dshbio/splitbed/functions.nf delete mode 100644 modules/dshbio/splitgff3/functions.nf delete mode 100644 modules/ectyper/functions.nf delete mode 100644 modules/emmtyper/functions.nf delete mode 100644 modules/ensemblvep/functions.nf delete mode 100644 modules/expansionhunter/functions.nf delete mode 100644 modules/fargene/functions.nf delete mode 100644 modules/fastani/functions.nf delete mode 100644 modules/fastp/functions.nf delete mode 100644 modules/fastqc/functions.nf delete mode 100644 modules/fastqscan/functions.nf delete mode 
100644 modules/fasttree/functions.nf delete mode 100644 modules/fgbio/callmolecularconsensusreads/functions.nf delete mode 100644 modules/fgbio/fastqtobam/functions.nf delete mode 100644 modules/fgbio/groupreadsbyumi/functions.nf delete mode 100644 modules/fgbio/sortbam/functions.nf delete mode 100644 modules/filtlong/functions.nf delete mode 100644 modules/flash/functions.nf delete mode 100644 modules/freebayes/functions.nf delete mode 100644 modules/gatk4/applybqsr/functions.nf delete mode 100644 modules/gatk4/baserecalibrator/functions.nf delete mode 100644 modules/gatk4/bedtointervallist/functions.nf delete mode 100644 modules/gatk4/calculatecontamination/functions.nf delete mode 100644 modules/gatk4/createsequencedictionary/functions.nf delete mode 100644 modules/gatk4/createsomaticpanelofnormals/functions.nf delete mode 100644 modules/gatk4/estimatelibrarycomplexity/functions.nf delete mode 100644 modules/gatk4/fastqtosam/functions.nf delete mode 100644 modules/gatk4/filtermutectcalls/functions.nf delete mode 100644 modules/gatk4/genomicsdbimport/functions.nf delete mode 100644 modules/gatk4/genotypegvcfs/functions.nf delete mode 100644 modules/gatk4/getpileupsummaries/functions.nf delete mode 100644 modules/gatk4/haplotypecaller/functions.nf delete mode 100644 modules/gatk4/indexfeaturefile/functions.nf delete mode 100644 modules/gatk4/intervallisttools/functions.nf delete mode 100644 modules/gatk4/learnreadorientationmodel/functions.nf delete mode 100644 modules/gatk4/markduplicates/functions.nf delete mode 100644 modules/gatk4/mergebamalignment/functions.nf delete mode 100644 modules/gatk4/mergevcfs/functions.nf delete mode 100644 modules/gatk4/mutect2/functions.nf delete mode 100644 modules/gatk4/revertsam/functions.nf delete mode 100644 modules/gatk4/samtofastq/functions.nf delete mode 100644 modules/gatk4/splitncigarreads/functions.nf delete mode 100644 modules/gatk4/variantfiltration/functions.nf delete mode 100644 modules/genmap/index/functions.nf delete mode 100644 modules/genmap/mappability/functions.nf delete mode 100644 modules/genrich/functions.nf delete mode 100644 modules/gffread/functions.nf delete mode 100644 modules/glnexus/functions.nf delete mode 100644 modules/graphmap2/align/functions.nf delete mode 100644 modules/graphmap2/index/functions.nf delete mode 100644 modules/gstama/collapse/functions.nf delete mode 100644 modules/gstama/merge/functions.nf delete mode 100644 modules/gtdbtk/classifywf/functions.nf delete mode 100644 modules/gubbins/functions.nf delete mode 100644 modules/gunc/downloaddb/functions.nf delete mode 100644 modules/gunc/run/functions.nf delete mode 100644 modules/gunzip/functions.nf delete mode 100644 modules/gunzip/test.txt.gz delete mode 100644 modules/hicap/functions.nf delete mode 100644 modules/hifiasm/functions.nf delete mode 100644 modules/hisat2/align/functions.nf delete mode 100644 modules/hisat2/build/functions.nf delete mode 100644 modules/hisat2/extractsplicesites/functions.nf delete mode 100644 modules/hmmcopy/gccounter/functions.nf delete mode 100644 modules/hmmcopy/readcounter/functions.nf delete mode 100644 modules/hmmer/hmmalign/functions.nf delete mode 100644 modules/homer/annotatepeaks/functions.nf delete mode 100644 modules/homer/findpeaks/functions.nf delete mode 100644 modules/homer/maketagdirectory/functions.nf delete mode 100644 modules/homer/makeucscfile/functions.nf delete mode 100644 modules/idr/functions.nf delete mode 100644 modules/imputeme/vcftoprs/functions.nf delete mode 100644 modules/iqtree/functions.nf 
delete mode 100644 modules/ismapper/functions.nf delete mode 100644 modules/isoseq3/cluster/functions.nf delete mode 100644 modules/isoseq3/refine/functions.nf delete mode 100644 modules/ivar/consensus/functions.nf delete mode 100644 modules/ivar/trim/functions.nf delete mode 100644 modules/ivar/variants/functions.nf delete mode 100644 modules/jupyternotebook/functions.nf delete mode 100644 modules/kallisto/index/functions.nf delete mode 100644 modules/kallistobustools/count/functions.nf delete mode 100644 modules/kallistobustools/ref/functions.nf delete mode 100644 modules/khmer/normalizebymedian/functions.nf delete mode 100644 modules/kleborate/functions.nf delete mode 100644 modules/kraken2/kraken2/functions.nf create mode 100644 modules/krona/kronadb/main.nf rename modules/{kronatools => krona}/kronadb/meta.yml (95%) create mode 100644 modules/krona/ktimporttaxonomy/main.nf rename modules/{kronatools => krona}/ktimporttaxonomy/meta.yml (95%) delete mode 100644 modules/kronatools/kronadb/functions.nf delete mode 100644 modules/kronatools/kronadb/main.nf delete mode 100644 modules/kronatools/ktimporttaxonomy/functions.nf delete mode 100644 modules/kronatools/ktimporttaxonomy/main.nf delete mode 100644 modules/last/dotplot/functions.nf delete mode 100644 modules/last/lastal/functions.nf delete mode 100644 modules/last/lastdb/functions.nf delete mode 100644 modules/last/mafconvert/functions.nf delete mode 100644 modules/last/mafswap/functions.nf delete mode 100644 modules/last/postmask/functions.nf delete mode 100644 modules/last/split/functions.nf delete mode 100644 modules/last/train/functions.nf delete mode 100644 modules/leehom/functions.nf delete mode 100644 modules/lib/functions.nf delete mode 100644 modules/lima/functions.nf delete mode 100644 modules/lissero/functions.nf delete mode 100644 modules/lofreq/call/functions.nf delete mode 100644 modules/lofreq/callparallel/functions.nf delete mode 100644 modules/lofreq/filter/functions.nf delete mode 100644 modules/lofreq/indelqual/functions.nf delete mode 100644 modules/macs2/callpeak/functions.nf delete mode 100644 modules/malt/build/functions.nf delete mode 100644 modules/malt/run/functions.nf delete mode 100644 modules/maltextract/functions.nf delete mode 100644 modules/manta/germline/functions.nf delete mode 100644 modules/manta/somatic/functions.nf delete mode 100644 modules/manta/tumoronly/functions.nf delete mode 100644 modules/mapdamage2/functions.nf delete mode 100644 modules/mash/sketch/functions.nf delete mode 100644 modules/mashtree/functions.nf delete mode 100644 modules/maxbin2/functions.nf delete mode 100644 modules/medaka/functions.nf delete mode 100644 modules/megahit/functions.nf delete mode 100644 modules/meningotype/functions.nf delete mode 100644 modules/metabat2/jgisummarizebamcontigdepths/functions.nf delete mode 100644 modules/metabat2/metabat2/functions.nf delete mode 100644 modules/metaphlan3/functions.nf delete mode 100644 modules/methyldackel/extract/functions.nf delete mode 100644 modules/methyldackel/mbias/functions.nf delete mode 100644 modules/minia/functions.nf delete mode 100644 modules/miniasm/functions.nf delete mode 100644 modules/minimap2/align/functions.nf delete mode 100644 modules/minimap2/index/functions.nf delete mode 100644 modules/mlst/functions.nf delete mode 100644 modules/mosdepth/functions.nf delete mode 100644 modules/msisensor/msi/functions.nf delete mode 100644 modules/msisensor/scan/functions.nf delete mode 100644 modules/mtnucratio/functions.nf delete mode 100644 
modules/multiqc/functions.nf delete mode 100644 modules/mummer/functions.nf delete mode 100644 modules/muscle/functions.nf delete mode 100644 modules/nanolyse/functions.nf delete mode 100644 modules/nanoplot/functions.nf delete mode 100644 modules/ncbigenomedownload/functions.nf delete mode 100755 modules/nextclade/functions.nf delete mode 100644 modules/ngmaster/functions.nf delete mode 100644 modules/nucmer/functions.nf delete mode 100644 modules/optitype/functions.nf delete mode 100644 modules/pairix/functions.nf delete mode 100644 modules/pairtools/dedup/functions.nf delete mode 100644 modules/pairtools/flip/functions.nf delete mode 100644 modules/pairtools/parse/functions.nf delete mode 100644 modules/pairtools/restrict/functions.nf delete mode 100644 modules/pairtools/select/functions.nf delete mode 100644 modules/pairtools/sort/functions.nf delete mode 100644 modules/pangolin/functions.nf delete mode 100644 modules/paraclu/functions.nf delete mode 100644 modules/pbbam/pbmerge/functions.nf delete mode 100644 modules/pbccs/functions.nf delete mode 100644 modules/peddy/functions.nf delete mode 100644 modules/phantompeakqualtools/functions.nf delete mode 100644 modules/phyloflash/functions.nf delete mode 100644 modules/picard/collecthsmetrics/functions.nf delete mode 100644 modules/picard/collectmultiplemetrics/functions.nf delete mode 100644 modules/picard/collectwgsmetrics/functions.nf delete mode 100644 modules/picard/filtersamreads/functions.nf delete mode 100644 modules/picard/markduplicates/functions.nf delete mode 100644 modules/picard/mergesamfiles/functions.nf delete mode 100644 modules/picard/sortsam/functions.nf delete mode 100644 modules/pirate/functions.nf delete mode 100644 modules/plasmidid/functions.nf delete mode 100644 modules/plink/extract/functions.nf delete mode 100644 modules/plink/vcf/functions.nf delete mode 100644 modules/plink2/vcf/functions.nf delete mode 100644 modules/pmdtools/filter/functions.nf delete mode 100644 modules/porechop/functions.nf delete mode 100644 modules/preseq/lcextrap/functions.nf delete mode 100644 modules/prodigal/functions.nf delete mode 100644 modules/prokka/functions.nf delete mode 100644 modules/pycoqc/functions.nf delete mode 100644 modules/pydamage/analyze/functions.nf delete mode 100644 modules/pydamage/filter/functions.nf delete mode 100644 modules/qcat/functions.nf delete mode 100644 modules/qualimap/bamqc/functions.nf delete mode 100644 modules/qualimap/rnaseq/functions.nf delete mode 100644 modules/quast/functions.nf delete mode 100644 modules/racon/functions.nf delete mode 100644 modules/rapidnj/functions.nf delete mode 100644 modules/rasusa/functions.nf delete mode 100644 modules/raxmlng/functions.nf delete mode 100644 modules/rmarkdownnotebook/functions.nf delete mode 100644 modules/roary/functions.nf delete mode 100644 modules/rsem/calculateexpression/functions.nf delete mode 100644 modules/rsem/preparereference/functions.nf delete mode 100644 modules/rseqc/bamstat/functions.nf delete mode 100644 modules/rseqc/inferexperiment/functions.nf delete mode 100644 modules/rseqc/innerdistance/functions.nf delete mode 100644 modules/rseqc/junctionannotation/functions.nf delete mode 100644 modules/rseqc/junctionsaturation/functions.nf delete mode 100644 modules/rseqc/readdistribution/functions.nf delete mode 100644 modules/rseqc/readduplication/functions.nf delete mode 100644 modules/salmon/index/functions.nf delete mode 100644 modules/salmon/quant/functions.nf delete mode 100644 modules/samblaster/functions.nf delete mode 100644 
modules/samtools/ampliconclip/functions.nf delete mode 100644 modules/samtools/bam2fq/functions.nf delete mode 100644 modules/samtools/depth/functions.nf delete mode 100644 modules/samtools/faidx/functions.nf delete mode 100644 modules/samtools/fastq/functions.nf delete mode 100644 modules/samtools/fixmate/functions.nf delete mode 100644 modules/samtools/flagstat/functions.nf delete mode 100644 modules/samtools/idxstats/functions.nf delete mode 100644 modules/samtools/index/functions.nf delete mode 100644 modules/samtools/merge/functions.nf delete mode 100644 modules/samtools/mpileup/functions.nf delete mode 100644 modules/samtools/sort/functions.nf delete mode 100644 modules/samtools/stats/functions.nf delete mode 100644 modules/samtools/view/functions.nf delete mode 100644 modules/scoary/functions.nf delete mode 100644 modules/seacr/callpeak/functions.nf delete mode 100644 modules/seqkit/split2/functions.nf delete mode 100644 modules/seqsero2/functions.nf delete mode 100644 modules/seqtk/mergepe/functions.nf delete mode 100644 modules/seqtk/sample/functions.nf delete mode 100644 modules/seqtk/subseq/functions.nf delete mode 100755 modules/sequenzautils/bam2seqz/functions.nf delete mode 100755 modules/sequenzautils/gcwiggle/functions.nf delete mode 100644 modules/seqwish/induce/functions.nf delete mode 100644 modules/shovill/functions.nf delete mode 100644 modules/snpdists/functions.nf delete mode 100644 modules/snpeff/functions.nf delete mode 100644 modules/snpsites/functions.nf delete mode 100644 modules/sortmerna/functions.nf delete mode 100644 modules/spades/functions.nf delete mode 100644 modules/spatyper/functions.nf delete mode 100644 modules/sratools/fasterqdump/functions.nf delete mode 100644 modules/sratools/prefetch/functions.nf delete mode 100644 modules/staphopiasccmec/functions.nf delete mode 100644 modules/star/align/functions.nf delete mode 100644 modules/star/genomegenerate/functions.nf delete mode 100644 modules/strelka/germline/functions.nf delete mode 100644 modules/strelka/somatic/functions.nf delete mode 100644 modules/stringtie/merge/functions.nf delete mode 100644 modules/stringtie/stringtie/functions.nf delete mode 100644 modules/subread/featurecounts/functions.nf delete mode 100644 modules/tabix/bgzip/functions.nf delete mode 100644 modules/tabix/bgziptabix/functions.nf delete mode 100644 modules/tabix/tabix/functions.nf delete mode 100644 modules/tbprofiler/profile/functions.nf delete mode 100644 modules/tiddit/cov/functions.nf delete mode 100644 modules/tiddit/sv/functions.nf delete mode 100644 modules/trimgalore/functions.nf delete mode 100644 modules/ucsc/bed12tobigbed/functions.nf delete mode 100755 modules/ucsc/bedclip/functions.nf delete mode 100644 modules/ucsc/bedgraphtobigwig/functions.nf delete mode 100755 modules/ucsc/bigwigaverageoverbed/functions.nf delete mode 100644 modules/ucsc/liftover/functions.nf delete mode 100755 modules/ucsc/wigtobigwig/functions.nf delete mode 100644 modules/ultra/pipeline/functions.nf delete mode 100644 modules/umitools/dedup/functions.nf delete mode 100644 modules/umitools/extract/functions.nf delete mode 100644 modules/unicycler/functions.nf delete mode 100644 modules/untar/functions.nf delete mode 100644 modules/unzip/functions.nf delete mode 100644 modules/variantbam/functions.nf delete mode 100644 modules/vcftools/functions.nf delete mode 100644 modules/yara/index/functions.nf delete mode 100644 modules/yara/mapper/functions.nf create mode 100644 tests/modules/abacas/nextflow.config create mode 100644 
tests/modules/adapterremoval/nextflow.config create mode 100644 tests/modules/agrvate/nextflow.config create mode 100644 tests/modules/allelecounter/nextflow.config create mode 100644 tests/modules/amps/nextflow.config create mode 100644 tests/modules/arriba/nextflow.config create mode 100644 tests/modules/artic/guppyplex/nextflow.config create mode 100644 tests/modules/artic/minion/nextflow.config create mode 100644 tests/modules/assemblyscan/nextflow.config create mode 100644 tests/modules/ataqv/ataqv/nextflow.config create mode 100644 tests/modules/bakta/nextflow.config create mode 100644 tests/modules/bamaligncleaner/nextflow.config create mode 100644 tests/modules/bamtools/split/nextflow.config create mode 100644 tests/modules/bamutil/trimbam/nextflow.config create mode 100644 tests/modules/bandage/image/nextflow.config create mode 100644 tests/modules/bbmap/align/nextflow.config create mode 100644 tests/modules/bbmap/bbduk/nextflow.config create mode 100644 tests/modules/bbmap/bbsplit/nextflow.config create mode 100644 tests/modules/bbmap/index/nextflow.config create mode 100644 tests/modules/bcftools/concat/nextflow.config create mode 100644 tests/modules/bcftools/consensus/nextflow.config create mode 100644 tests/modules/bcftools/filter/nextflow.config create mode 100644 tests/modules/bcftools/index/nextflow.config create mode 100644 tests/modules/bcftools/isec/nextflow.config create mode 100644 tests/modules/bcftools/merge/nextflow.config create mode 100644 tests/modules/bcftools/mpileup/nextflow.config create mode 100644 tests/modules/bcftools/norm/nextflow.config create mode 100644 tests/modules/bcftools/query/nextflow.config create mode 100644 tests/modules/bcftools/reheader/nextflow.config create mode 100644 tests/modules/bcftools/stats/nextflow.config create mode 100644 tests/modules/bcftools/view/nextflow.config create mode 100644 tests/modules/bedtools/bamtobed/nextflow.config create mode 100644 tests/modules/bedtools/complement/nextflow.config create mode 100644 tests/modules/bedtools/genomecov/nextflow.config create mode 100644 tests/modules/bedtools/getfasta/nextflow.config create mode 100644 tests/modules/bedtools/intersect/nextflow.config create mode 100644 tests/modules/bedtools/makewindows/nextflow.config create mode 100644 tests/modules/bedtools/maskfasta/nextflow.config create mode 100644 tests/modules/bedtools/merge/nextflow.config create mode 100644 tests/modules/bedtools/slop/nextflow.config create mode 100644 tests/modules/bedtools/sort/nextflow.config create mode 100644 tests/modules/bedtools/subtract/nextflow.config create mode 100644 tests/modules/bismark/align/nextflow.config create mode 100644 tests/modules/bismark/deduplicate/nextflow.config create mode 100644 tests/modules/bismark/genomepreparation/nextflow.config create mode 100644 tests/modules/bismark/methylationextractor/nextflow.config create mode 100644 tests/modules/bismark/report/nextflow.config create mode 100644 tests/modules/bismark/summary/nextflow.config create mode 100644 tests/modules/blast/blastn/nextflow.config create mode 100644 tests/modules/blast/makeblastdb/nextflow.config create mode 100644 tests/modules/bowtie/align/nextflow.config create mode 100644 tests/modules/bowtie/build_test/nextflow.config create mode 100644 tests/modules/bowtie2/align/nextflow.config create mode 100644 tests/modules/bowtie2/build_test/nextflow.config create mode 100644 tests/modules/bwa/aln/nextflow.config create mode 100644 tests/modules/bwa/index/nextflow.config create mode 100644 
tests/modules/bwa/mem/nextflow.config create mode 100644 tests/modules/bwa/sampe/nextflow.config create mode 100644 tests/modules/bwa/samse/nextflow.config create mode 100644 tests/modules/bwamem2/index/nextflow.config create mode 100644 tests/modules/bwamem2/mem/nextflow.config create mode 100644 tests/modules/bwameth/align/nextflow.config create mode 100644 tests/modules/bwameth/index/nextflow.config create mode 100644 tests/modules/cat/cat/nextflow.config create mode 100644 tests/modules/cat/fastq/nextflow.config create mode 100644 tests/modules/cellranger/mkref/nextflow.config create mode 100644 tests/modules/checkm/lineagewf/nextflow.config create mode 100644 tests/modules/chromap/chromap/nextflow.config create mode 100644 tests/modules/chromap/index/nextflow.config create mode 100644 tests/modules/clonalframeml/nextflow.config create mode 100644 tests/modules/cmseq/polymut/nextflow.config create mode 100644 tests/modules/cnvkit/batch/nextflow.config create mode 100644 tests/modules/cooler/cload/nextflow.config create mode 100644 tests/modules/cooler/digest/nextflow.config create mode 100644 tests/modules/cooler/dump/nextflow.config create mode 100644 tests/modules/cooler/merge/nextflow.config create mode 100644 tests/modules/cooler/zoomify/nextflow.config create mode 100644 tests/modules/csvtk/concat/nextflow.config create mode 100644 tests/modules/csvtk/split/nextflow.config create mode 100644 tests/modules/custom/dumpsoftwareversions/nextflow.config create mode 100644 tests/modules/custom/getchromsizes/nextflow.config create mode 100644 tests/modules/cutadapt/nextflow.config create mode 100644 tests/modules/damageprofiler/nextflow.config create mode 100644 tests/modules/dastool/dastool/nextflow.config create mode 100644 tests/modules/dastool/scaffolds2bin/nextflow.config create mode 100644 tests/modules/dedup/nextflow.config create mode 100644 tests/modules/deeptools/computematrix/nextflow.config create mode 100644 tests/modules/deeptools/plotfingerprint/nextflow.config create mode 100644 tests/modules/deeptools/plotheatmap/nextflow.config create mode 100644 tests/modules/deeptools/plotprofile/nextflow.config create mode 100644 tests/modules/delly/call/nextflow.config create mode 100644 tests/modules/diamond/blastp/nextflow.config create mode 100644 tests/modules/diamond/blastx/nextflow.config create mode 100644 tests/modules/diamond/makedb/nextflow.config create mode 100644 tests/modules/dragonflye/nextflow.config create mode 100644 tests/modules/dshbio/exportsegments/nextflow.config create mode 100644 tests/modules/dshbio/filterbed/nextflow.config create mode 100644 tests/modules/dshbio/filtergff3/nextflow.config create mode 100644 tests/modules/dshbio/splitbed/nextflow.config create mode 100644 tests/modules/dshbio/splitgff3/nextflow.config create mode 100644 tests/modules/ectyper/nextflow.config create mode 100644 tests/modules/emmtyper/nextflow.config create mode 100644 tests/modules/ensemblvep/nextflow.config create mode 100644 tests/modules/expansionhunter/nextflow.config create mode 100644 tests/modules/fargene/nextflow.config create mode 100644 tests/modules/fastani/nextflow.config create mode 100644 tests/modules/fastp/nextflow.config create mode 100644 tests/modules/fastqc/nextflow.config create mode 100644 tests/modules/fastqscan/nextflow.config create mode 100644 tests/modules/fasttree/nextflow.config create mode 100644 tests/modules/fgbio/callmolecularconsensusreads/nextflow.config create mode 100644 tests/modules/fgbio/fastqtobam/nextflow.config create mode 100644 
tests/modules/fgbio/groupreadsbyumi/nextflow.config create mode 100644 tests/modules/fgbio/sortbam/nextflow.config create mode 100644 tests/modules/filtlong/nextflow.config create mode 100644 tests/modules/flash/nextflow.config create mode 100644 tests/modules/freebayes/nextflow.config create mode 100644 tests/modules/gatk4/applybqsr/nextflow.config create mode 100644 tests/modules/gatk4/baserecalibrator/nextflow.config create mode 100644 tests/modules/gatk4/bedtointervallist/nextflow.config create mode 100644 tests/modules/gatk4/calculatecontamination/nextflow.config create mode 100644 tests/modules/gatk4/createsequencedictionary/nextflow.config create mode 100644 tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config create mode 100644 tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config create mode 100644 tests/modules/gatk4/fastqtosam/nextflow.config create mode 100644 tests/modules/gatk4/filtermutectcalls/nextflow.config create mode 100644 tests/modules/gatk4/genomicsdbimport/nextflow.config create mode 100644 tests/modules/gatk4/genotypegvcfs/nextflow.config create mode 100644 tests/modules/gatk4/getpileupsummaries/nextflow.config create mode 100644 tests/modules/gatk4/haplotypecaller/nextflow.config create mode 100644 tests/modules/gatk4/indexfeaturefile/nextflow.config create mode 100644 tests/modules/gatk4/intervallisttools/nextflow.config create mode 100644 tests/modules/gatk4/learnreadorientationmodel/nextflow.config create mode 100644 tests/modules/gatk4/markduplicates/nextflow.config create mode 100644 tests/modules/gatk4/mergebamalignment/nextflow.config create mode 100644 tests/modules/gatk4/mergevcfs/nextflow.config create mode 100644 tests/modules/gatk4/mutect2/nextflow.config create mode 100644 tests/modules/gatk4/revertsam/nextflow.config create mode 100644 tests/modules/gatk4/samtofastq/nextflow.config create mode 100644 tests/modules/gatk4/splitncigarreads/nextflow.config create mode 100644 tests/modules/gatk4/variantfiltration/nextflow.config create mode 100644 tests/modules/genmap/index/nextflow.config create mode 100644 tests/modules/genmap/mappability/nextflow.config create mode 100644 tests/modules/genrich/nextflow.config create mode 100644 tests/modules/gffread/nextflow.config create mode 100644 tests/modules/glnexus/nextflow.config create mode 100644 tests/modules/graphmap2/align/nextflow.config create mode 100644 tests/modules/graphmap2/index/nextflow.config create mode 100644 tests/modules/gstama/collapse/nextflow.config create mode 100644 tests/modules/gstama/merge/nextflow.config create mode 100644 tests/modules/gtdbtk/classifywf/nextflow.config create mode 100644 tests/modules/gubbins/nextflow.config create mode 100644 tests/modules/gunc/downloaddb/nextflow.config create mode 100644 tests/modules/gunc/run/nextflow.config create mode 100644 tests/modules/gunzip/nextflow.config create mode 100644 tests/modules/hicap/nextflow.config create mode 100644 tests/modules/hifiasm/nextflow.config create mode 100644 tests/modules/hisat2/align/nextflow.config create mode 100644 tests/modules/hisat2/build_test/nextflow.config create mode 100644 tests/modules/hisat2/extractsplicesites/nextflow.config create mode 100644 tests/modules/hmmcopy/gccounter/nextflow.config create mode 100644 tests/modules/hmmcopy/readcounter/nextflow.config create mode 100644 tests/modules/hmmer/hmmalign/nextflow.config create mode 100644 tests/modules/homer/annotatepeaks/nextflow.config create mode 100644 tests/modules/homer/findpeaks/nextflow.config create mode 100644 
tests/modules/homer/maketagdirectory/nextflow.config create mode 100644 tests/modules/homer/makeucscfile/nextflow.config create mode 100644 tests/modules/idr/nextflow.config create mode 100644 tests/modules/imputeme/vcftoprs/nextflow.config create mode 100644 tests/modules/iqtree/nextflow.config create mode 100644 tests/modules/ismapper/nextflow.config create mode 100644 tests/modules/isoseq3/cluster/nextflow.config create mode 100644 tests/modules/isoseq3/refine/nextflow.config create mode 100644 tests/modules/ivar/consensus/nextflow.config create mode 100644 tests/modules/ivar/trim/nextflow.config create mode 100644 tests/modules/ivar/variants/nextflow.config create mode 100644 tests/modules/jupyternotebook/nextflow.config create mode 100644 tests/modules/kallisto/index/nextflow.config create mode 100644 tests/modules/kallistobustools/count/nextflow.config create mode 100644 tests/modules/kallistobustools/ref/nextflow.config create mode 100644 tests/modules/khmer/normalizebymedian/nextflow.config create mode 100644 tests/modules/kleborate/nextflow.config create mode 100644 tests/modules/kraken2/kraken2/nextflow.config create mode 100644 tests/modules/krona/kronadb/main.nf create mode 100644 tests/modules/krona/kronadb/nextflow.config create mode 100644 tests/modules/krona/kronadb/test.yml create mode 100644 tests/modules/krona/ktimporttaxonomy/main.nf create mode 100644 tests/modules/krona/ktimporttaxonomy/nextflow.config create mode 100644 tests/modules/krona/ktimporttaxonomy/test.yml delete mode 100644 tests/modules/kronatools/kronadb/main.nf delete mode 100644 tests/modules/kronatools/kronadb/test.yml delete mode 100644 tests/modules/kronatools/ktimporttaxonomy/main.nf delete mode 100644 tests/modules/kronatools/ktimporttaxonomy/test.yml create mode 100644 tests/modules/last/dotplot/nextflow.config create mode 100644 tests/modules/last/lastal/nextflow.config create mode 100644 tests/modules/last/lastdb/nextflow.config create mode 100644 tests/modules/last/mafconvert/nextflow.config create mode 100644 tests/modules/last/mafswap/nextflow.config create mode 100644 tests/modules/last/postmask/nextflow.config create mode 100644 tests/modules/last/split/nextflow.config create mode 100644 tests/modules/last/train/nextflow.config create mode 100644 tests/modules/leehom/nextflow.config create mode 100644 tests/modules/lima/nextflow.config create mode 100644 tests/modules/lissero/nextflow.config create mode 100644 tests/modules/lofreq/call/nextflow.config create mode 100644 tests/modules/lofreq/callparallel/nextflow.config create mode 100644 tests/modules/lofreq/filter/nextflow.config create mode 100644 tests/modules/lofreq/indelqual/nextflow.config create mode 100644 tests/modules/macs2/callpeak/nextflow.config create mode 100644 tests/modules/malt/build_test/nextflow.config create mode 100644 tests/modules/malt/run/nextflow.config create mode 100644 tests/modules/maltextract/nextflow.config create mode 100644 tests/modules/manta/germline/nextflow.config create mode 100644 tests/modules/manta/somatic/nextflow.config create mode 100644 tests/modules/manta/tumoronly/nextflow.config create mode 100644 tests/modules/mapdamage2/nextflow.config create mode 100644 tests/modules/mash/sketch/nextflow.config create mode 100644 tests/modules/mashtree/nextflow.config create mode 100644 tests/modules/maxbin2/nextflow.config create mode 100644 tests/modules/medaka/nextflow.config create mode 100644 tests/modules/megahit/nextflow.config create mode 100644 tests/modules/meningotype/nextflow.config create mode 
100644 tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config create mode 100644 tests/modules/metabat2/metabat2/nextflow.config create mode 100644 tests/modules/metaphlan3/nextflow.config create mode 100644 tests/modules/methyldackel/extract/nextflow.config create mode 100644 tests/modules/methyldackel/mbias/nextflow.config create mode 100644 tests/modules/minia/nextflow.config create mode 100644 tests/modules/miniasm/nextflow.config create mode 100644 tests/modules/minimap2/align/nextflow.config create mode 100644 tests/modules/minimap2/index/nextflow.config create mode 100644 tests/modules/mlst/nextflow.config create mode 100644 tests/modules/mosdepth/nextflow.config create mode 100644 tests/modules/msisensor/msi/nextflow.config create mode 100644 tests/modules/msisensor/scan/nextflow.config create mode 100644 tests/modules/mtnucratio/nextflow.config create mode 100644 tests/modules/multiqc/nextflow.config create mode 100644 tests/modules/mummer/nextflow.config create mode 100644 tests/modules/muscle/nextflow.config create mode 100644 tests/modules/nanolyse/nextflow.config create mode 100644 tests/modules/nanoplot/nextflow.config create mode 100644 tests/modules/ncbigenomedownload/nextflow.config create mode 100644 tests/modules/nextclade/nextflow.config create mode 100644 tests/modules/ngmaster/nextflow.config create mode 100644 tests/modules/nucmer/nextflow.config create mode 100644 tests/modules/optitype/nextflow.config create mode 100644 tests/modules/pairix/nextflow.config create mode 100644 tests/modules/pairtools/dedup/nextflow.config create mode 100644 tests/modules/pairtools/flip/nextflow.config create mode 100644 tests/modules/pairtools/parse/nextflow.config create mode 100644 tests/modules/pairtools/restrict/nextflow.config create mode 100644 tests/modules/pairtools/select/nextflow.config create mode 100644 tests/modules/pairtools/sort/nextflow.config create mode 100644 tests/modules/pangolin/nextflow.config create mode 100644 tests/modules/paraclu/nextflow.config create mode 100644 tests/modules/pbbam/pbmerge/nextflow.config create mode 100644 tests/modules/pbccs/nextflow.config create mode 100644 tests/modules/peddy/nextflow.config create mode 100644 tests/modules/phyloflash/nextflow.config create mode 100644 tests/modules/picard/collecthsmetrics/nextflow.config create mode 100644 tests/modules/picard/collectmultiplemetrics/nextflow.config create mode 100644 tests/modules/picard/collectwgsmetrics/nextflow.config create mode 100644 tests/modules/picard/filtersamreads/nextflow.config create mode 100644 tests/modules/picard/markduplicates/nextflow.config create mode 100644 tests/modules/picard/mergesamfiles/nextflow.config create mode 100644 tests/modules/picard/sortsam/nextflow.config create mode 100644 tests/modules/pirate/nextflow.config create mode 100644 tests/modules/plasmidid/nextflow.config create mode 100644 tests/modules/plink/extract/nextflow.config create mode 100644 tests/modules/plink/vcf/nextflow.config create mode 100644 tests/modules/plink2/vcf/nextflow.config create mode 100644 tests/modules/pmdtools/filter/nextflow.config create mode 100644 tests/modules/porechop/nextflow.config create mode 100644 tests/modules/preseq/lcextrap/nextflow.config create mode 100644 tests/modules/prodigal/nextflow.config create mode 100644 tests/modules/prokka/nextflow.config create mode 100644 tests/modules/pycoqc/nextflow.config create mode 100644 tests/modules/pydamage/analyze/nextflow.config create mode 100644 tests/modules/pydamage/filter/nextflow.config create 
mode 100644 tests/modules/qcat/nextflow.config create mode 100644 tests/modules/qualimap/bamqc/nextflow.config create mode 100644 tests/modules/quast/nextflow.config create mode 100644 tests/modules/racon/nextflow.config create mode 100644 tests/modules/rapidnj/nextflow.config create mode 100644 tests/modules/rasusa/nextflow.config create mode 100644 tests/modules/raxmlng/nextflow.config create mode 100644 tests/modules/rmarkdownnotebook/nextflow.config create mode 100644 tests/modules/roary/nextflow.config create mode 100644 tests/modules/rsem/calculateexpression/nextflow.config create mode 100644 tests/modules/rsem/preparereference/nextflow.config create mode 100644 tests/modules/rseqc/bamstat/nextflow.config create mode 100644 tests/modules/rseqc/inferexperiment/nextflow.config create mode 100644 tests/modules/rseqc/innerdistance/nextflow.config create mode 100644 tests/modules/rseqc/junctionannotation/nextflow.config create mode 100644 tests/modules/rseqc/junctionsaturation/nextflow.config create mode 100644 tests/modules/rseqc/readdistribution/nextflow.config create mode 100644 tests/modules/rseqc/readduplication/nextflow.config create mode 100644 tests/modules/salmon/index/nextflow.config create mode 100644 tests/modules/salmon/quant/nextflow.config create mode 100644 tests/modules/samblaster/nextflow.config create mode 100644 tests/modules/samtools/ampliconclip/nextflow.config create mode 100644 tests/modules/samtools/bam2fq/nextflow.config create mode 100644 tests/modules/samtools/depth/nextflow.config create mode 100644 tests/modules/samtools/faidx/nextflow.config create mode 100644 tests/modules/samtools/fastq/nextflow.config create mode 100644 tests/modules/samtools/fixmate/nextflow.config create mode 100644 tests/modules/samtools/flagstat/nextflow.config create mode 100644 tests/modules/samtools/idxstats/nextflow.config create mode 100644 tests/modules/samtools/index/nextflow.config create mode 100644 tests/modules/samtools/merge/nextflow.config create mode 100644 tests/modules/samtools/mpileup/nextflow.config create mode 100644 tests/modules/samtools/sort/nextflow.config create mode 100644 tests/modules/samtools/stats/nextflow.config create mode 100644 tests/modules/samtools/view/nextflow.config create mode 100644 tests/modules/scoary/nextflow.config create mode 100644 tests/modules/seacr/callpeak/nextflow.config create mode 100644 tests/modules/seqkit/split2/nextflow.config create mode 100644 tests/modules/seqsero2/nextflow.config create mode 100644 tests/modules/seqtk/mergepe/nextflow.config create mode 100644 tests/modules/seqtk/sample/nextflow.config create mode 100644 tests/modules/seqtk/subseq/nextflow.config create mode 100644 tests/modules/sequenzautils/bam2seqz/nextflow.config create mode 100644 tests/modules/sequenzautils/gcwiggle/nextflow.config create mode 100644 tests/modules/seqwish/induce/nextflow.config create mode 100644 tests/modules/shovill/nextflow.config create mode 100644 tests/modules/snpdists/nextflow.config create mode 100644 tests/modules/snpeff/nextflow.config create mode 100644 tests/modules/snpsites/nextflow.config create mode 100644 tests/modules/spades/nextflow.config create mode 100644 tests/modules/spatyper/nextflow.config create mode 100644 tests/modules/sratools/fasterqdump/nextflow.config create mode 100644 tests/modules/sratools/prefetch/nextflow.config create mode 100644 tests/modules/staphopiasccmec/nextflow.config create mode 100644 tests/modules/star/align/nextflow.config create mode 100644 
tests/modules/star/genomegenerate/nextflow.config create mode 100644 tests/modules/strelka/germline/nextflow.config create mode 100644 tests/modules/strelka/somatic/nextflow.config create mode 100644 tests/modules/stringtie/merge/nextflow.config create mode 100644 tests/modules/stringtie/stringtie/nextflow.config create mode 100644 tests/modules/subread/featurecounts/nextflow.config create mode 100644 tests/modules/tabix/bgzip/nextflow.config create mode 100644 tests/modules/tabix/bgziptabix/nextflow.config create mode 100644 tests/modules/tabix/tabix/nextflow.config create mode 100644 tests/modules/tbprofiler/profile/nextflow.config create mode 100644 tests/modules/tiddit/cov/nextflow.config create mode 100644 tests/modules/tiddit/sv/nextflow.config create mode 100644 tests/modules/trimgalore/nextflow.config create mode 100644 tests/modules/ucsc/bed12tobigbed/nextflow.config create mode 100644 tests/modules/ucsc/bedclip/nextflow.config create mode 100644 tests/modules/ucsc/bedgraphtobigwig/nextflow.config create mode 100644 tests/modules/ucsc/bigwigaverageoverbed/nextflow.config create mode 100644 tests/modules/ucsc/liftover/nextflow.config create mode 100644 tests/modules/ucsc/wigtobigwig/nextflow.config create mode 100644 tests/modules/ultra/pipeline/nextflow.config create mode 100644 tests/modules/unicycler/nextflow.config create mode 100644 tests/modules/untar/nextflow.config create mode 100644 tests/modules/unzip/nextflow.config create mode 100644 tests/modules/variantbam/nextflow.config create mode 100644 tests/modules/vcftools/nextflow.config create mode 100644 tests/modules/yara/index/nextflow.config create mode 100644 tests/modules/yara/mapper/nextflow.config diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml index 55b8c296..121dd865 100644 --- a/.github/workflows/nf-core-linting.yml +++ b/.github/workflows/nf-core-linting.yml @@ -7,7 +7,6 @@ on: pull_request: branches: [master] - jobs: changes: name: Check for changes @@ -25,9 +24,6 @@ jobs: lint: runs-on: ubuntu-20.04 - env: - NXF_VER: 21.04.0 - name: ${{ matrix.tags }} needs: changes if: needs.changes.outputs.modules != '[]' diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 7cbb2689..0bd892c8 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -23,13 +23,12 @@ jobs: test: runs-on: ubuntu-20.04 - name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} + name: ${{ matrix.tags }} ${{ matrix.profile }} needs: changes if: needs.changes.outputs.modules != '[]' strategy: fail-fast: false matrix: - nxf_version: ["21.04.0"] tags: ["${{ fromJson(needs.changes.outputs.modules) }}"] profile: ["docker", "singularity", "conda"] env: @@ -60,13 +59,12 @@ jobs: - uses: actions/cache@v2 with: path: /usr/local/bin/nextflow - key: ${{ runner.os }}-nextflow-${{ matrix.nxf_version }} + key: ${{ runner.os }} restore-keys: | ${{ runner.os }}-nextflow- - name: Install Nextflow env: - NXF_VER: ${{ matrix.nxf_version }} CAPSULE_LOG: none run: | wget -qO- get.nextflow.io | bash @@ -93,13 +91,13 @@ jobs: # Test the module - name: Run pytest-workflow # only use one thread for pytest-workflow to avoid race condition on conda cache. 
- run: NF_CORE_MODULES_TEST=1 TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof + run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof - name: Upload logs on failure if: failure() uses: actions/upload-artifact@v2 with: - name: logs-${{ matrix.profile }}-${{ matrix.nxf_version }} + name: logs-${{ matrix.profile }} path: | /home/runner/pytest_workflow_*/*/.nextflow.log /home/runner/pytest_workflow_*/*/log.out diff --git a/README.md b/README.md index f25b37d9..beee42e7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # ![nf-core/modules](docs/images/nfcore-modules_logo.png) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -78,7 +78,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi nextflow.enable.dsl = 2 - include { FASTQC } from './modules/nf-core/modules/fastqc/main' addParams( options: [:] ) + include { FASTQC } from './modules/nf-core/modules/fastqc/main' ``` 5. Remove the module from the pipeline repository if required: diff --git a/modules/abacas/functions.nf b/modules/abacas/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/abacas/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
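As the README hunk above shows, modules are now included without addParams( options: [:] ); per-module options are instead expected to arrive through ext settings in the pipeline configuration, which is also what the many newly created tests/modules/*/nextflow.config files in the diffstat provide for the tests. A minimal, purely illustrative sketch of that pattern follows; the FASTQC selector comes from the README example, but the argument value is hypothetical and not taken from this patch:

    // main.nf (pipeline side): include the module with no addParams
    include { FASTQC } from './modules/nf-core/modules/fastqc/main'

    // nextflow.config (pipeline side): supply what used to be options.args
    process {
        withName: 'FASTQC' {
            ext.args = '--quiet'   // hypothetical tool arguments, read inside the module as task.ext.args
        }
    }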
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/abacas/main.nf b/modules/abacas/main.nf index bc5440b1..7fe71e3a 100644 --- a/modules/abacas/main.nf +++ b/modules/abacas/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ABACAS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::abacas=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/abacas:1.3.1--pl526_0" - } else { - container "quay.io/biocontainers/abacas:1.3.1--pl526_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/abacas:1.3.1--pl526_0' : + 'quay.io/biocontainers/abacas:1.3.1--pl526_0' }" input: tuple val(meta), path(scaffold) @@ -27,12 +16,13 @@ process ABACAS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ abacas.pl \\ -r $fasta \\ -q $scaffold \\ - $options.args \\ + $args \\ -o ${prefix}.abacas mv nucmer.delta ${prefix}.abacas.nucmer.delta @@ -40,8 +30,8 @@ process ABACAS { mv nucmer.tiling ${prefix}.abacas.nucmer.tiling mv unused_contigs.out ${prefix}.abacas.unused.contigs.out cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(abacas.pl -v 2>&1) | sed 's/^.*ABACAS.//; s/ .*\$//') + "${task.process}": + abacas: \$(echo \$(abacas.pl -v 2>&1) | sed 's/^.*ABACAS.//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/adapterremoval/functions.nf b/modules/adapterremoval/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/adapterremoval/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index 6d559826..33955ed2 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ADAPTERREMOVAL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::adapterremoval=2.3.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/adapterremoval:2.3.2--hb7ba0dd_0" - } else { - container "quay.io/biocontainers/adapterremoval:2.3.2--hb7ba0dd_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/adapterremoval:2.3.2--hb7ba0dd_0' : + 'quay.io/biocontainers/adapterremoval:2.3.2--hb7ba0dd_0' }" input: tuple val(meta), path(reads) @@ -26,13 +16,14 @@ process ADAPTERREMOVAL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ AdapterRemoval \\ --file1 $reads \\ - $options.args \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -41,8 +32,8 @@ process ADAPTERREMOVAL { --gzip \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") END_VERSIONS """ } else if (!meta.single_end && !meta.collapse) { @@ -50,7 +41,7 @@ process ADAPTERREMOVAL { AdapterRemoval \\ --file1 ${reads[0]} \\ --file2 ${reads[1]} \\ - $options.args \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -60,8 +51,8 @@ process ADAPTERREMOVAL { --gzip \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. 
//g") END_VERSIONS """ } else { @@ -70,7 +61,7 @@ process ADAPTERREMOVAL { --file1 ${reads[0]} \\ --file2 ${reads[1]} \\ --collapse \\ - $options.args \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -79,8 +70,8 @@ process ADAPTERREMOVAL { cat *.collapsed.gz *.collapsed.truncated.gz > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") END_VERSIONS """ } diff --git a/modules/agrvate/functions.nf b/modules/agrvate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/agrvate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index c45bbe06..06392e16 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process AGRVATE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::agrvate=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/agrvate:1.0.2--hdfd78af_0" - } else { - container "quay.io/biocontainers/agrvate:1.0.2--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/agrvate:1.0.2--hdfd78af_0' : + 'quay.io/biocontainers/agrvate:1.0.2--hdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -27,15 +16,16 @@ process AGRVATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ agrvate \\ - $options.args \\ + $args \\ -i $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(agrvate -v 2>&1) | sed 's/agrvate v//;') + "${task.process}": + agrvate: \$(echo \$(agrvate -v 2>&1) | sed 's/agrvate v//;') END_VERSIONS """ } diff --git a/modules/allelecounter/functions.nf b/modules/allelecounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/allelecounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index 8d986579..5cbc4cbd 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ALLELECOUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::cancerit-allelecount=4.3.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cancerit-allelecount:4.3.0--h41abebc_0" - } else { - container "quay.io/biocontainers/cancerit-allelecount:4.3.0--h41abebc_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cancerit-allelecount:4.3.0--h41abebc_0' : + 'quay.io/biocontainers/cancerit-allelecount:4.3.0--h41abebc_0' }" input: tuple val(meta), path(input), path(input_index) @@ -28,20 +17,21 @@ process ALLELECOUNTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference_options = fasta ? 
"-r $fasta": "" """ alleleCounter \\ - $options.args \\ + $args \\ -l $loci \\ -b $input \\ $reference_options \\ -o ${prefix}.alleleCount cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(alleleCounter --version) + "${task.process}": + allelecounter: \$(alleleCounter --version) END_VERSIONS """ } diff --git a/modules/amps/functions.nf b/modules/amps/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/amps/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/amps/main.nf b/modules/amps/main.nf index f34423b5..871b57c6 100644 --- a/modules/amps/main.nf +++ b/modules/amps/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process AMPS { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::hops=0.35" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1" - } else { - container "quay.io/biocontainers/hops:0.35--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1' : + 'quay.io/biocontainers/hops:0.35--hdfd78af_1' }" input: path maltextract_results @@ -30,6 +19,7 @@ process AMPS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ postprocessing.AMPS.r \\ -r $maltextract_results \\ @@ -37,11 +27,11 @@ process AMPS { -m $filter \\ -t $task.cpus \\ -j \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') + "${task.process}": + amps: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') END_VERSIONS """ } diff --git a/modules/arriba/functions.nf b/modules/arriba/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/arriba/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/arriba/main.nf b/modules/arriba/main.nf index 6abae233..459ff100 100644 --- a/modules/arriba/main.nf +++ b/modules/arriba/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARRIBA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::arriba=2.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/arriba:2.1.0--h3198e80_1" - } else { - container "quay.io/biocontainers/arriba:2.1.0--h3198e80_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/arriba:2.1.0--h3198e80_1' : + 'quay.io/biocontainers/arriba:2.1.0--h3198e80_1' }" input: tuple val(meta), path(bam) @@ -29,8 +18,9 @@ process ARRIBA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def blacklist = (options.args.contains('-b')) ? '' : '-f blacklist' + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def blacklist = (args.contains('-b')) ? 
'' : '-f blacklist' """ arriba \\ -x $bam \\ @@ -39,11 +29,11 @@ process ARRIBA { -o ${prefix}.fusions.tsv \\ -O ${prefix}.fusions.discarded.tsv \\ $blacklist \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(arriba -h | grep 'Version:' 2>&1 | sed 's/Version:\s//') + "${task.process}": + arriba: \$(arriba -h | grep 'Version:' 2>&1 | sed 's/Version:\s//') END_VERSIONS """ } diff --git a/modules/artic/guppyplex/functions.nf b/modules/artic/guppyplex/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/artic/guppyplex/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/artic/guppyplex/main.nf b/modules/artic/guppyplex/main.nf index 87bd99c8..a69e5381 100644 --- a/modules/artic/guppyplex/main.nf +++ b/modules/artic/guppyplex/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARTIC_GUPPYPLEX { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::artic=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0" - } else { - container "quay.io/biocontainers/artic:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0' : + 'quay.io/biocontainers/artic:1.2.1--py_0' }" input: tuple val(meta), path(fastq_dir) @@ -26,18 +15,19 @@ process ARTIC_GUPPYPLEX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ artic \\ guppyplex \\ - $options.args \\ + $args \\ --directory $fastq_dir \\ --output ${prefix}.fastq pigz -p $task.cpus *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') + "${task.process}": + artic: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/artic/minion/functions.nf b/modules/artic/minion/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/artic/minion/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/artic/minion/main.nf b/modules/artic/minion/main.nf index 68474f19..86863f95 100644 --- a/modules/artic/minion/main.nf +++ b/modules/artic/minion/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARTIC_MINION { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::artic=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0" - } else { - container "quay.io/biocontainers/artic:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0' : + 'quay.io/biocontainers/artic:1.2.1--py_0' }" input: tuple val(meta), path(fastq) @@ -43,20 +32,21 @@ process ARTIC_MINION { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def version = scheme_version.toString().toLowerCase().replaceAll('v','') - def fast5 = params.fast5_dir ? "--fast5-directory $fast5_dir" : "" - def summary = params.sequencing_summary ? "--sequencing-summary $sequencing_summary" : "" + def fast5 = fast5_dir ? "--fast5-directory $fast5_dir" : "" + def summary = sequencing_summary ? "--sequencing-summary $sequencing_summary" : "" def model = "" - if (options.args.tokenize().contains('--medaka')) { + if (args.tokenize().contains('--medaka')) { fast5 = "" summary = "" - model = file(params.artic_minion_medaka_model).exists() ? "--medaka-model ./$medaka_model" : "--medaka-model $params.artic_minion_medaka_model" + model = file(medaka_model).exists() ? 
"--medaka-model ./$medaka_model" : "--medaka-model $medaka_model" } """ artic \\ minion \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --read-file $fastq \\ --scheme-directory ./primer-schemes \\ @@ -68,8 +58,8 @@ process ARTIC_MINION { $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') + "${task.process}": + artic: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/assemblyscan/functions.nf b/modules/assemblyscan/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/assemblyscan/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/assemblyscan/main.nf b/modules/assemblyscan/main.nf index 5b82f922..7b5b752b 100644 --- a/modules/assemblyscan/main.nf +++ b/modules/assemblyscan/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ASSEMBLYSCAN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::assembly-scan=0.4.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/assembly-scan:0.4.1--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/assembly-scan:0.4.1--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/assembly-scan:0.4.1--pyhdfd78af_0' : + 'quay.io/biocontainers/assembly-scan:0.4.1--pyhdfd78af_0' }" input: tuple val(meta), path(assembly) @@ -26,13 +15,14 @@ process ASSEMBLYSCAN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ assembly-scan $assembly > ${prefix}.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( assembly-scan --version 2>&1 | sed 's/^.*assembly-scan //; s/Using.*\$//' ) + "${task.process}": + assemblyscan: \$( assembly-scan --version 2>&1 | sed 's/^.*assembly-scan //; s/Using.*\$//' ) END_VERSIONS """ } diff --git a/modules/ataqv/ataqv/functions.nf b/modules/ataqv/ataqv/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ataqv/ataqv/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ataqv/ataqv/main.nf b/modules/ataqv/ataqv/main.nf index 5ddade28..39602d30 100644 --- a/modules/ataqv/ataqv/main.nf +++ b/modules/ataqv/ataqv/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ATAQV_ATAQV { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ataqv=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ataqv:1.2.1--py39ha23c084_2" - } else { - container "quay.io/biocontainers/ataqv:1.2.1--py36hfdecbe1_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ataqv:1.2.1--py39ha23c084_2' : + 'quay.io/biocontainers/ataqv:1.2.1--py36hfdecbe1_2' }" input: tuple val(meta), path(bam), path(bai), path(peak_file) @@ -30,14 +20,15 @@ process ATAQV_ATAQV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def peak = peak_file ? "--peak-file $peak_file" : '' def tss = tss_file ? "--tss-file $tss_file" : '' def excl_regs = excl_regs_file ? "--excluded-region-file $excl_regs_file" : '' def autosom_ref = autosom_ref_file ? 
"--autosomal-reference-file $autosom_ref_file" : '' """ ataqv \\ - $options.args \\ + $args \\ $peak \\ $tss \\ $excl_regs \\ @@ -49,8 +40,8 @@ process ATAQV_ATAQV { $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( ataqv --version ) + "${task.process}": + ataqv: \$( ataqv --version ) END_VERSIONS """ } diff --git a/modules/bakta/functions.nf b/modules/bakta/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bakta/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bakta/main.nf b/modules/bakta/main.nf index 2939f575..20127e53 100644 --- a/modules/bakta/main.nf +++ b/modules/bakta/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAKTA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bakta=1.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bakta:1.2.2--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/bakta:1.2.2--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bakta:1.2.2--pyhdfd78af_0' : + 'quay.io/biocontainers/bakta:1.2.2--pyhdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -37,27 +26,28 @@ process BAKTA { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? "--prodigal-tf ${prodigal_tf[0]}" : "" """ bakta \\ - $options.args \\ + $args \\ --threads $task.cpus \\ - --prefix ${prefix} \\ + --prefix $prefix \\ --db $db \\ $proteins_opt \\ $prodigal_tf \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + "${task.process}": + bakta: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) END_VERSIONS """ stub: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ touch ${prefix}.embl touch ${prefix}.faa @@ -70,8 +60,8 @@ process BAKTA { touch ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + "${task.process}": + bakta: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) END_VERSIONS """ } diff --git a/modules/bamaligncleaner/functions.nf b/modules/bamaligncleaner/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bamaligncleaner/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bamaligncleaner/main.nf b/modules/bamaligncleaner/main.nf index 720b495a..f1481c39 100644 --- a/modules/bamaligncleaner/main.nf +++ b/modules/bamaligncleaner/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAMALIGNCLEANER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bamaligncleaner=0.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bamaligncleaner:0.2.1--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/bamaligncleaner:0.2.1--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bamaligncleaner:0.2.1--pyhdfd78af_0' : + 'quay.io/biocontainers/bamaligncleaner:0.2.1--pyhdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,18 @@ process BAMALIGNCLEANER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bamAlignCleaner \\ - $options.args \\ + $args \\ -o ${prefix}.bam \\ ${bam} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bamAlignCleaner --version | sed 's/.*version //') + "${task.process}": + bamaligncleaner: \$(bamAlignCleaner --version | sed 's/.*version //') END_VERSIONS """ } diff --git a/modules/bamtools/split/functions.nf b/modules/bamtools/split/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bamtools/split/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 86eaa5d6..676aab6f 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAMTOOLS_SPLIT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bamtools=2.5.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bamtools:2.5.1--h9a82719_9" - } else { - container "quay.io/biocontainers/bamtools:2.5.1--h9a82719_9" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bamtools:2.5.1--h9a82719_9' : + 'quay.io/biocontainers/bamtools:2.5.1--h9a82719_9' }" input: tuple val(meta), path(bam) @@ -26,16 +15,17 @@ process BAMTOOLS_SPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bamtools \\ split \\ -in $bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) + "${task.process}": + bamtools: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) END_VERSIONS """ } diff --git a/modules/bamutil/trimbam/functions.nf b/modules/bamutil/trimbam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bamutil/trimbam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bamutil/trimbam/main.nf b/modules/bamutil/trimbam/main.nf index 60949338..a210fe5f 100644 --- a/modules/bamutil/trimbam/main.nf +++ b/modules/bamutil/trimbam/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAMUTIL_TRIMBAM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bamutil=1.0.15" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bamutil:1.0.15--h2e03b76_1" - } else { - container "quay.io/biocontainers/bamutil:1.0.15--h2e03b76_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bamutil:1.0.15--h2e03b76_1' : + 'quay.io/biocontainers/bamutil:1.0.15--h2e03b76_1' }" input: tuple val(meta), path(bam), val(trim_left), val(trim_right) @@ -26,19 +15,20 @@ process BAMUTIL_TRIMBAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bam \\ trimBam \\ $bam \\ ${prefix}.bam \\ - $options.args \\ + $args \\ -L $trim_left \\ -R $trim_right cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( bam trimBam 2>&1 ) | sed 's/^Version: //;s/;.*//' ) + "${task.process}": + bamutil: \$( echo \$( bam trimBam 2>&1 ) | sed 's/^Version: //;s/;.*//' ) END_VERSIONS """ } diff --git a/modules/bandage/image/functions.nf b/modules/bandage/image/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bandage/image/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bandage/image/main.nf b/modules/bandage/image/main.nf index b7a30a0b..e31566d1 100644 --- a/modules/bandage/image/main.nf +++ b/modules/bandage/image/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BANDAGE_IMAGE { tag "${meta.id}" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bandage=0.8.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bandage:0.8.1--hc9558a2_2" - } else { - container "quay.io/biocontainers/bandage:0.8.1--hc9558a2_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bandage:0.8.1--hc9558a2_2' : + 'quay.io/biocontainers/bandage:0.8.1--hc9558a2_2' }" input: tuple val(meta), path(gfa) @@ -27,14 +16,15 @@ process BANDAGE_IMAGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - Bandage image $gfa ${prefix}.png $options.args - Bandage image $gfa ${prefix}.svg $options.args + Bandage image $gfa ${prefix}.png $args + Bandage image $gfa ${prefix}.svg $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(Bandage --version 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + bandage: \$(echo \$(Bandage --version 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bbmap/align/functions.nf b/modules/bbmap/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index 40810575..ef23fada 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_ALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bbmap=38.92 bioconda::samtools=1.13 pigz=2.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0" - } else { - container "quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' : + 'quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' }" input: tuple val(meta), path(fastq) @@ -28,7 +17,8 @@ process BBMAP_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" input = meta.single_end ? 
"in=${fastq}" : "in=${fastq[0]} in2=${fastq[1]}" @@ -50,14 +40,14 @@ process BBMAP_ALIGN { $db \\ $input \\ out=${prefix}.bam \\ - $options.args \\ + $args \\ threads=$task.cpus \\ -Xmx${task.memory.toGiga()}g \\ &> ${prefix}.bbmap.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS diff --git a/modules/bbmap/bbduk/functions.nf b/modules/bbmap/bbduk/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/bbduk/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/bbduk/main.nf b/modules/bbmap/bbduk/main.nf index d7243fdb..98a21eab 100644 --- a/modules/bbmap/bbduk/main.nf +++ b/modules/bbmap/bbduk/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_BBDUK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bbmap=38.90" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.90--he522d1c_1" - } else { - container "quay.io/biocontainers/bbmap:38.90--he522d1c_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bbmap:38.90--he522d1c_1' : + 'quay.io/biocontainers/bbmap:38.90--he522d1c_1' }" input: tuple val(meta), path(reads) @@ -27,7 +17,8 @@ process BBMAP_BBDUK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def raw = meta.single_end ? "in=${reads[0]}" : "in1=${reads[0]} in2=${reads[1]}" def trimmed = meta.single_end ? "out=${prefix}.fastq.gz" : "out1=${prefix}_1.fastq.gz out2=${prefix}_2.fastq.gz" def contaminants_fa = contaminants ? 
"ref=$contaminants" : '' @@ -38,12 +29,12 @@ process BBMAP_BBDUK { $raw \\ $trimmed \\ threads=$task.cpus \\ - $options.args \\ + $args \\ $contaminants_fa \\ &> ${prefix}.bbduk.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) END_VERSIONS """ } diff --git a/modules/bbmap/bbsplit/functions.nf b/modules/bbmap/bbsplit/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/bbsplit/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/bbsplit/main.nf b/modules/bbmap/bbsplit/main.nf index b2249b17..53f6b1aa 100644 --- a/modules/bbmap/bbsplit/main.nf +++ b/modules/bbmap/bbsplit/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_BBSPLIT { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bbmap=38.93" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.93--he522d1c_0" - } else { - container "quay.io/biocontainers/bbmap:38.93--he522d1c_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bbmap:38.93--he522d1c_0' : + 'quay.io/biocontainers/bbmap:38.93--he522d1c_0' }" input: tuple val(meta), path(reads) @@ -32,7 +21,8 @@ process BBMAP_BBSPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { @@ -54,11 +44,11 @@ process BBMAP_BBSPLIT { ${other_refs.join(' ')} \\ path=bbsplit \\ threads=$task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh 2>&1) + "${task.process}": + bbmap: \$(bbversion.sh 2>&1) END_VERSIONS """ } else { @@ -83,11 +73,11 @@ process BBMAP_BBSPLIT { $fastq_in \\ $fastq_out \\ refstats=${prefix}.stats.txt \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh 2>&1) + "${task.process}": + bbmap: \$(bbversion.sh 2>&1) END_VERSIONS """ } diff --git a/modules/bbmap/index/functions.nf b/modules/bbmap/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/index/main.nf b/modules/bbmap/index/main.nf index b9e52ec7..4c02f84e 100644 --- a/modules/bbmap/index/main.nf +++ b/modules/bbmap/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_INDEX { tag "$fasta" label 'process_long' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bbmap=38.92" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.92--he522d1c_0" - } else { - container "quay.io/biocontainers/bbmap:38.92--he522d1c_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bbmap:38.92--he522d1c_0' : + 'quay.io/biocontainers/bbmap:38.92--he522d1c_0' }" input: path fasta @@ -26,16 +15,17 @@ process BBMAP_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bbmap.sh \\ ref=${fasta} \\ - $options.args \\ + $args \\ threads=$task.cpus \\ -Xmx${task.memory.toGiga()}g cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) END_VERSIONS """ } diff --git a/modules/bcftools/concat/functions.nf b/modules/bcftools/concat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/concat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/concat/main.nf b/modules/bcftools/concat/main.nf index 48280eea..dbd9d9dc 100644 --- a/modules/bcftools/concat/main.nf +++ b/modules/bcftools/concat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_CONCAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" - } else { - container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0' : + 'quay.io/biocontainers/bcftools:1.11--h7c999a4_0' }" input: tuple val(meta), path(vcfs) @@ -26,17 +15,18 @@ process BCFTOOLS_CONCAT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools concat \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcfs} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/consensus/functions.nf b/modules/bcftools/consensus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/consensus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/consensus/main.nf b/modules/bcftools/consensus/main.nf index 954b0eb8..9b9384a6 100644 --- a/modules/bcftools/consensus/main.nf +++ b/modules/bcftools/consensus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_CONSENSUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' - } else { - container 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(tbi), path(fasta) @@ -26,15 +15,16 @@ process BCFTOOLS_CONSENSUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - cat $fasta | bcftools consensus $vcf $options.args > ${prefix}.fa + cat $fasta | bcftools consensus $vcf $args > ${prefix}.fa header=\$(head -n 1 ${prefix}.fa | sed 's/>//g') sed -i 's/\${header}/${meta.id}/g' ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/filter/functions.nf b/modules/bcftools/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/filter/main.nf b/modules/bcftools/filter/main.nf index 5323e0fb..87ad3183 100644 --- a/modules/bcftools/filter/main.nf +++ b/modules/bcftools/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_FILTER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -26,16 +15,17 @@ process BCFTOOLS_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools filter \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/index/functions.nf b/modules/bcftools/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/index/main.nf b/modules/bcftools/index/main.nf index d67614d8..8f40c683 100644 --- a/modules/bcftools/index/main.nf +++ b/modules/bcftools/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_INDEX { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -27,18 +16,19 @@ process BCFTOOLS_INDEX { path "versions.yml" , emit: version script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools \\ index \\ - $options.args \\ + $args \\ --threads $task.cpus \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/isec/functions.nf b/modules/bcftools/isec/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/isec/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/isec/main.nf b/modules/bcftools/isec/main.nf index cc3e425e..c4eab09d 100644 --- a/modules/bcftools/isec/main.nf +++ b/modules/bcftools/isec/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_ISEC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcfs), path(tbis) @@ -26,15 +15,16 @@ process BCFTOOLS_ISEC { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools isec \\ - $options.args \\ + $args \\ -p $prefix \\ *.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/merge/functions.nf b/modules/bcftools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/merge/main.nf b/modules/bcftools/merge/main.nf index bb68f184..32ad760c 100644 --- a/modules/bcftools/merge/main.nf +++ b/modules/bcftools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcfs), path(tbis) @@ -26,15 +15,16 @@ process BCFTOOLS_MERGE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools merge -Oz \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ *.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/mpileup/functions.nf b/modules/bcftools/mpileup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/mpileup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : ''
-                path_list.add(path)
-            }
-        }
-    }
-    if (ioptions.publish_files instanceof Map) {
-        for (ext in ioptions.publish_files) {
-            if (args.filename.endsWith(ext.key)) {
-                def ext_list = path_list.collect()
-                ext_list.add(ext.value)
-                return "${getPathFromList(ext_list)}/$args.filename"
-            }
-        }
-    } else if (ioptions.publish_files == null) {
-        return "${getPathFromList(path_list)}/$args.filename"
-    }
-}
diff --git a/modules/bcftools/mpileup/main.nf b/modules/bcftools/mpileup/main.nf
index df8455a5..9d91193c 100644
--- a/modules/bcftools/mpileup/main.nf
+++ b/modules/bcftools/mpileup/main.nf
@@ -1,22 +1,11 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'
-
-params.options = [:]
-options = initOptions(params.options)
-
 process BCFTOOLS_MPILEUP {
     tag "$meta.id"
     label 'process_medium'
-    publishDir "${params.outdir}",
-        mode: params.publish_dir_mode,
-        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

     conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null)
-    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
-        container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0"
-    } else {
-        container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0"
-    }
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' :
+        'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }"

     input:
     tuple val(meta), path(bam)
@@ -29,21 +18,28 @@
     path "versions.yml" , emit: versions

     script:
-    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
+    def args = task.ext.args ?: ''
+    def args2 = task.ext.args2 ?: ''
+    def args3 = task.ext.args3 ?: ''
+    def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}"
     """
     echo "${meta.id}" > sample_name.list
+
     bcftools mpileup \\
         --fasta-ref $fasta \\
-        $options.args \\
+        $args \\
         $bam \\
-        | bcftools call --output-type v $options.args2 \\
+        | bcftools call --output-type v $args2 \\
         | bcftools reheader --samples sample_name.list \\
-        | bcftools view --output-file ${prefix}.vcf.gz --output-type z $options.args3
+        | bcftools view --output-file ${prefix}.vcf.gz --output-type z $args3
+
     tabix -p vcf -f ${prefix}.vcf.gz
+
     bcftools stats ${prefix}.vcf.gz > ${prefix}.bcftools_stats.txt
+
     cat <<-END_VERSIONS > versions.yml
-    ${getProcessName(task.process)}:
-        ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//')
+    "${task.process}":
+        bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//')
     END_VERSIONS
     """
 }
diff --git a/modules/bcftools/norm/functions.nf b/modules/bcftools/norm/functions.nf
deleted file mode 100644
index 85628ee0..00000000
--- a/modules/bcftools/norm/functions.nf
+++ /dev/null
@@ -1,78 +0,0 @@
-//
-// Utility functions used in nf-core DSL2 module files
-//
-
-//
-// Extract name of software tool from process name using $task.process
-//
-def getSoftwareName(task_process) {
-    return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
-}
-
-//
-// Extract name of module from process name using $task.process
-//
-def getProcessName(task_process) {
-    return task_process.tokenize(':')[-1]
-}
-
-//
-// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
-//
-def initOptions(Map args) {
-    def Map options = [:]
-    options.args = args.args ?: ''
-    options.args2 = args.args2 ?: ''
-    options.args3 = args.args3 ?: ''
-    options.publish_by_meta = args.publish_by_meta ?: []
-    options.publish_dir = args.publish_dir ?: ''
-    options.publish_files = args.publish_files
-    options.suffix = args.suffix ?: ''
-    return options
-}
-
-//
-// Tidy up and join elements of a list to return a path string
-//
-def getPathFromList(path_list) {
-    def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
-    paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
-    return paths.join('/')
-}
-
-//
-// Function to save/publish module results
-//
-def saveFiles(Map args) {
-    def ioptions = initOptions(args.options)
-    def path_list = [ ioptions.publish_dir ?: args.publish_dir ]

-    // Do not publish versions.yml unless running from pytest workflow
-    if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
-        return null
-    }
-    if (ioptions.publish_by_meta) {
-        def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
-        for (key in key_list) {
-            if (args.meta && key instanceof String) {
-                def path = key
-                if (args.meta.containsKey(key)) {
-                    path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
-                }
-                path = path instanceof String ?
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf index 7e506e49..79ab36e0 100644 --- a/modules/bcftools/norm/main.nf +++ b/modules/bcftools/norm/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_NORM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -27,18 +16,19 @@ process BCFTOOLS_NORM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools norm \\ --fasta-ref ${fasta} \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/query/functions.nf b/modules/bcftools/query/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/query/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/query/main.nf b/modules/bcftools/query/main.nf index dae8bbc4..1919fa76 100644 --- a/modules/bcftools/query/main.nf +++ b/modules/bcftools/query/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_QUERY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(index) @@ -29,7 +18,8 @@ process BCFTOOLS_QUERY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? 
"--samples-file ${samples}" : "" @@ -40,12 +30,12 @@ process BCFTOOLS_QUERY { ${regions_file} \\ ${targets_file} \\ ${samples_file} \\ - $options.args \\ + $args \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/reheader/functions.nf b/modules/bcftools/reheader/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/reheader/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/reheader/main.nf b/modules/bcftools/reheader/main.nf index 953a8adb..3cbe2d8f 100644 --- a/modules/bcftools/reheader/main.nf +++ b/modules/bcftools/reheader/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_REHEADER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -28,7 +17,8 @@ process BCFTOOLS_REHEADER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def update_sequences = fai ? "-f $fai" : "" def new_header = header ? 
"-h $header" : "" """ @@ -36,14 +26,14 @@ process BCFTOOLS_REHEADER { reheader \\ $update_sequences \\ $new_header \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -o ${prefix}.vcf.gz \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/stats/functions.nf b/modules/bcftools/stats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/stats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/stats/main.nf b/modules/bcftools/stats/main.nf index 31bed814..c66f4453 100644 --- a/modules/bcftools/stats/main.nf +++ b/modules/bcftools/stats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_STATS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -26,12 +15,13 @@ process BCFTOOLS_STATS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - bcftools stats $options.args $vcf > ${prefix}.bcftools_stats.txt + bcftools stats $args $vcf > ${prefix}.bcftools_stats.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/view/functions.nf b/modules/bcftools/view/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/view/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf index ef72f081..b2cbb580 100644 --- a/modules/bcftools/view/main.nf +++ b/modules/bcftools/view/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_VIEW { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(index) @@ -29,25 +18,24 @@ process BCFTOOLS_VIEW { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? 
"--samples-file ${samples}" : "" - - """ bcftools view \\ --output ${prefix}.vcf.gz \\ ${regions_file} \\ ${targets_file} \\ ${samples_file} \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bedtools/bamtobed/functions.nf b/modules/bedtools/bamtobed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/bamtobed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/bamtobed/main.nf b/modules/bedtools/bamtobed/main.nf index 71c439d3..aebf7339 100644 --- a/modules/bedtools/bamtobed/main.nf +++ b/modules/bedtools/bamtobed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_BAMTOBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,18 @@ process BEDTOOLS_BAMTOBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ bamtobed \\ - $options.args \\ + $args \\ -i $bam \\ | bedtools sort > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/complement/functions.nf b/modules/bedtools/complement/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/complement/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/complement/main.nf b/modules/bedtools/complement/main.nf index 77214c64..df44b5bc 100644 --- a/modules/bedtools/complement/main.nf +++ b/modules/bedtools/complement/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_COMPLEMENT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,18 +16,19 @@ process BEDTOOLS_COMPLEMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ complement \\ -i $bed \\ -g $sizes \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/genomecov/functions.nf b/modules/bedtools/genomecov/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/genomecov/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/genomecov/main.nf b/modules/bedtools/genomecov/main.nf index 52f37f23..e2a74ed3 100644 --- a/modules/bedtools/genomecov/main.nf +++ b/modules/bedtools/genomecov/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_GENOMECOV { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals), val(scale) @@ -28,15 +17,14 @@ process BEDTOOLS_GENOMECOV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args_token = options.args.tokenize() - def args = options.args + def args = task.ext.args ?: '' + def args_list = args.tokenize() args += (scale > 0 && scale != 1) ? " -scale $scale" : "" - - if (!args_token.contains('-bg') && (scale > 0 && scale != 1)) { + if (!args_list.contains('-bg') && (scale > 0 && scale != 1)) { args += " -bg" } + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (intervals.name =~ /\.bam/) { """ bedtools \\ @@ -46,8 +34,8 @@ process BEDTOOLS_GENOMECOV { > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } else { @@ -60,8 +48,8 @@ process BEDTOOLS_GENOMECOV { > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/getfasta/functions.nf b/modules/bedtools/getfasta/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/getfasta/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/getfasta/main.nf b/modules/bedtools/getfasta/main.nf index b27f6183..c4dae429 100644 --- a/modules/bedtools/getfasta/main.nf +++ b/modules/bedtools/getfasta/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_GETFASTA { tag "$bed" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: path bed @@ -27,18 +16,19 @@ process BEDTOOLS_GETFASTA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${bed.baseName}${options.suffix}" : "${bed.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${bed.baseName}${task.ext.suffix}" : "${bed.baseName}" """ bedtools \\ getfasta \\ - $options.args \\ + $args \\ -fi $fasta \\ -bed $bed \\ -fo ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/intersect/functions.nf b/modules/bedtools/intersect/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/intersect/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/intersect/main.nf b/modules/bedtools/intersect/main.nf index 1ab0a8b2..e01c78ac 100644 --- a/modules/bedtools/intersect/main.nf +++ b/modules/bedtools/intersect/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_INTERSECT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals1), path(intervals2) @@ -27,18 +16,19 @@ process BEDTOOLS_INTERSECT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ intersect \\ -a $intervals1 \\ -b $intervals2 \\ - $options.args \\ + $args \\ > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/makewindows/functions.nf b/modules/bedtools/makewindows/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/makewindows/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/makewindows/main.nf b/modules/bedtools/makewindows/main.nf index c9f863d0..cb7d6561 100644 --- a/modules/bedtools/makewindows/main.nf +++ b/modules/bedtools/makewindows/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MAKEWINDOWS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--h7d7f7ad_1" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--h7d7f7ad_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--h7d7f7ad_1' : + 'quay.io/biocontainers/bedtools:2.30.0--h7d7f7ad_1' }" input: tuple val(meta), path(regions) @@ -27,18 +16,19 @@ process BEDTOOLS_MAKEWINDOWS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def arg_input = use_bed ? 
"-b $regions" : "-g $regions" """ bedtools \\ makewindows \\ ${arg_input} \\ - $options.args \\ + $args \\ > ${prefix}.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/maskfasta/functions.nf b/modules/bedtools/maskfasta/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/maskfasta/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/maskfasta/main.nf b/modules/bedtools/maskfasta/main.nf index 8ee33d7a..77be060c 100644 --- a/modules/bedtools/maskfasta/main.nf +++ b/modules/bedtools/maskfasta/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MASKFASTA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,17 +16,18 @@ process BEDTOOLS_MASKFASTA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ maskfasta \\ - $options.args \\ + $args \\ -fi $fasta \\ -bed $bed \\ -fo ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/merge/functions.nf b/modules/bedtools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/merge/main.nf b/modules/bedtools/merge/main.nf index 92a59f9e..907f1c9b 100644 --- a/modules/bedtools/merge/main.nf +++ b/modules/bedtools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -26,17 +15,18 @@ process BEDTOOLS_MERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ merge \\ -i $bed \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/slop/functions.nf b/modules/bedtools/slop/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/slop/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/slop/main.nf b/modules/bedtools/slop/main.nf index 4b412b1f..e5d92850 100644 --- a/modules/bedtools/slop/main.nf +++ b/modules/bedtools/slop/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SLOP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,18 +16,19 @@ process BEDTOOLS_SLOP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ slop \\ -i $bed \\ -g $sizes \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ diff --git a/modules/bedtools/sort/functions.nf b/modules/bedtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/sort/main.nf b/modules/bedtools/sort/main.nf index 4a51c4b2..15e69036 100644 --- a/modules/bedtools/sort/main.nf +++ b/modules/bedtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SORT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals) @@ -27,17 +16,18 @@ process BEDTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ sort \\ -i $intervals \\ - $options.args \\ + $args \\ > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/subtract/functions.nf b/modules/bedtools/subtract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/subtract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/subtract/main.nf b/modules/bedtools/subtract/main.nf index 54a12bf4..e645109d 100644 --- a/modules/bedtools/subtract/main.nf +++ b/modules/bedtools/subtract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SUBTRACT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals1), path(intervals2) @@ -26,18 +15,19 @@ process BEDTOOLS_SUBTRACT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ subtract \\ -a $intervals1 \\ -b $intervals2 \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bismark/align/functions.nf b/modules/bismark/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/align/main.nf b/modules/bismark/align/main.nf index aa4879ba..95e7cdfc 100644 --- a/modules/bismark/align/main.nf +++ b/modules/bismark/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(reads) @@ -29,18 +18,19 @@ process BISMARK_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def fastq = meta.single_end ? 
reads : "-1 ${reads[0]} -2 ${reads[1]}" """ bismark \\ $fastq \\ - $options.args \\ + $args \\ --genome $index \\ --bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/deduplicate/functions.nf b/modules/bismark/deduplicate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/deduplicate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/deduplicate/main.nf b/modules/bismark/deduplicate/main.nf index c3ff27d6..c95c54d1 100644 --- a/modules/bismark/deduplicate/main.nf +++ b/modules/bismark/deduplicate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_DEDUPLICATE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(bam) @@ -27,17 +16,18 @@ process BISMARK_DEDUPLICATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def seqtype = meta.single_end ? 
'-s' : '-p' """ deduplicate_bismark \\ - $options.args \\ + $args \\ $seqtype \\ --bam $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/genomepreparation/functions.nf b/modules/bismark/genomepreparation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/genomepreparation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/genomepreparation/main.nf b/modules/bismark/genomepreparation/main.nf index 0a86173d..e096b2b8 100644 --- a/modules/bismark/genomepreparation/main.nf +++ b/modules/bismark/genomepreparation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_GENOMEPREPARATION { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: path fasta, stageAs: "BismarkIndex/*" @@ -26,14 +15,15 @@ process BISMARK_GENOMEPREPARATION { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bismark_genome_preparation \\ - $options.args \\ + $args \\ BismarkIndex cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/methylationextractor/functions.nf b/modules/bismark/methylationextractor/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/methylationextractor/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } 
// Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/methylationextractor/main.nf b/modules/bismark/methylationextractor/main.nf index 5e89e6f8..d99c2b5e 100644 --- a/modules/bismark/methylationextractor/main.nf +++ b/modules/bismark/methylationextractor/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_METHYLATIONEXTRACTOR { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(bam) @@ -31,6 +20,7 @@ process BISMARK_METHYLATIONEXTRACTOR { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def seqtype = meta.single_end ? 
'-s' : '-p' """ bismark_methylation_extractor \\ @@ -39,12 +29,12 @@ process BISMARK_METHYLATIONEXTRACTOR { --gzip \\ --report \\ $seqtype \\ - $options.args \\ + $args \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/report/functions.nf b/modules/bismark/report/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/report/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/report/main.nf b/modules/bismark/report/main.nf index 70c6ba3b..f828ecd8 100644 --- a/modules/bismark/report/main.nf +++ b/modules/bismark/report/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_REPORT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(align_report), path(dedup_report), path(splitting_report), path(mbias) @@ -26,12 +15,13 @@ process BISMARK_REPORT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - bismark2report $options.args + bismark2report $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/summary/functions.nf b/modules/bismark/summary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/summary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/summary/main.nf b/modules/bismark/summary/main.nf index 3d5f294e..72dba72e 100644 --- a/modules/bismark/summary/main.nf +++ b/modules/bismark/summary/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_SUMMARY { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: path(bam) @@ -29,12 +18,13 @@ process BISMARK_SUMMARY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bismark2summary cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/blast/blastn/functions.nf b/modules/blast/blastn/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/blast/blastn/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
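Throughout these hunks the per-module if/else around the container directive collapses into a single Groovy ternary inside the container string, switching on the container engine and on task.ext.singularity_pull_docker_container rather than the old params flag. The generic shape of the pattern, with placeholder image URIs standing in for the pinned biocontainers images:

    // generic form of the new container selection; image URIs are placeholders
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
        'https://depot.galaxyproject.org/singularity/<tool>:<version>--<build>' :
        'quay.io/biocontainers/<tool>:<version>--<build>' }"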
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/blast/blastn/main.nf b/modules/blast/blastn/main.nf index 0d65f1d0..d1bdcf77 100644 --- a/modules/blast/blastn/main.nf +++ b/modules/blast/blastn/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BLAST_BLASTN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::blast=2.12.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' - } else { - container 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' : + 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' }" input: tuple val(meta), path(fasta) @@ -27,18 +16,19 @@ process BLAST_BLASTN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.ndb" | sed 's/.ndb//'` blastn \\ -num_threads $task.cpus \\ -db \$DB \\ -query $fasta \\ - $options.args \\ + $args \\ -out ${prefix}.blastn.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') + "${task.process}": + blast: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/blast/makeblastdb/functions.nf b/modules/blast/makeblastdb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/blast/makeblastdb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/blast/makeblastdb/main.nf b/modules/blast/makeblastdb/main.nf index 0538e0db..b4c426a4 100644 --- a/modules/blast/makeblastdb/main.nf +++ b/modules/blast/makeblastdb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BLAST_MAKEBLASTDB { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::blast=2.12.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' - } else { - container 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' : + 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' }" input: path fasta @@ -26,15 +15,16 @@ process BLAST_MAKEBLASTDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ makeblastdb \\ -in $fasta \\ - $options.args + $args mkdir blast_db mv ${fasta}* blast_db cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') + "${task.process}": + blast: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie/align/functions.nf b/modules/bowtie/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // 
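With the functions.nf imports and params.options boilerplate gone, a converted module can be included and called directly, with any tool arguments reaching it through configuration. A minimal usage sketch for the single-input BLAST_MAKEBLASTDB module shown above; the include path and input channel are assumptions for illustration only:

    include { BLAST_MAKEBLASTDB } from './modules/blast/makeblastdb/main'

    workflow {
        ch_fasta = Channel.fromPath('genome.fasta')   // hypothetical input FASTA
        BLAST_MAKEBLASTDB ( ch_fasta )
    }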
Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 764b5be2..12188269 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bowtie=1.3.0 bioconda::samtools=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' - } else { - container 'quay.io/biocontainers/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' : + 'quay.io/biocontainers/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,9 @@ process BOWTIE_ALIGN { tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def unaligned = params.save_unaligned ? "--un ${prefix}.unmapped.fastq" : '' def endedness = meta.single_end ? 
"$reads" : "-1 ${reads[0]} -2 ${reads[1]}" """ @@ -40,10 +31,10 @@ process BOWTIE_ALIGN { -x \$INDEX \\ -q \\ $unaligned \\ - $options.args \\ + $args \\ $endedness \\ 2> ${prefix}.out \\ - | samtools view $options.args2 -@ $task.cpus -bS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bS -o ${prefix}.bam - if [ -f ${prefix}.unmapped.fastq ]; then gzip ${prefix}.unmapped.fastq @@ -54,8 +45,8 @@ process BOWTIE_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + "${task.process}": + bowtie: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bowtie/build/functions.nf b/modules/bowtie/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie/build/main.nf b/modules/bowtie/build/main.nf index 1b83541b..dbbc8efa 100644 --- a/modules/bowtie/build/main.nf +++ b/modules/bowtie/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE_BUILD { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::bowtie=1.3.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bowtie:1.3.0--py38hed8969a_1' - } else { - container 'quay.io/biocontainers/bowtie:1.3.0--py38hed8969a_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bowtie:1.3.0--py38hed8969a_1' : + 'quay.io/biocontainers/bowtie:1.3.0--py38hed8969a_1' }" input: path fasta @@ -26,12 +15,13 @@ process BOWTIE_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bowtie bowtie-build --threads $task.cpus $fasta bowtie/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + "${task.process}": + bowtie: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie2/align/functions.nf b/modules/bowtie2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and 
trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 6f923951..11c9c20a 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE2_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bowtie2=2.4.2 bioconda::samtools=1.11 conda-forge::pigz=2.3.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0" - } else { - container "quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0' : + 'quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,9 @@ process BOWTIE2_ALIGN { tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { def unaligned = params.save_unaligned ? 
"--un-gz ${prefix}.unmapped.fastq.gz" : '' """ @@ -39,13 +30,13 @@ process BOWTIE2_ALIGN { -U $reads \\ --threads $task.cpus \\ $unaligned \\ - $options.args \\ + $args \\ 2> ${prefix}.bowtie2.log \\ - | samtools view -@ $task.cpus $options.args2 -bhS -o ${prefix}.bam - + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS @@ -60,9 +51,9 @@ process BOWTIE2_ALIGN { -2 ${reads[1]} \\ --threads $task.cpus \\ $unaligned \\ - $options.args \\ + $args \\ 2> ${prefix}.bowtie2.log \\ - | samtools view -@ $task.cpus $options.args2 -bhS -o ${prefix}.bam - + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - if [ -f ${prefix}.unmapped.fastq.1.gz ]; then mv ${prefix}.unmapped.fastq.1.gz ${prefix}.unmapped_1.fastq.gz @@ -72,8 +63,8 @@ process BOWTIE2_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS diff --git a/modules/bowtie2/build/functions.nf b/modules/bowtie2/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie2/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list 
= ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie2/build/main.nf b/modules/bowtie2/build/main.nf index bc95eea8..c0cbcd79 100644 --- a/modules/bowtie2/build/main.nf +++ b/modules/bowtie2/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE2_BUILD { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::bowtie2=2.4.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bowtie2:2.4.4--py39hbb4e92a_0' - } else { - container 'quay.io/biocontainers/bowtie2:2.4.4--py36hd4290be_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bowtie2:2.4.4--py39hbb4e92a_0' : + 'quay.io/biocontainers/bowtie2:2.4.4--py36hd4290be_0' }" input: path fasta @@ -26,12 +15,13 @@ process BOWTIE2_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bowtie2 - bowtie2-build $options.args --threads $task.cpus $fasta bowtie2/${fasta.baseName} + bowtie2-build $args --threads $task.cpus $fasta bowtie2/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bwa/aln/functions.nf b/modules/bwa/aln/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/aln/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index 07135aea..f6cdaefa 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_ALN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa=0.7.17" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa:0.7.17--h5bf99c6_8" - } else { - container "quay.io/biocontainers/bwa:0.7.17--h5bf99c6_8" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwa:0.7.17--h5bf99c6_8' : + 'quay.io/biocontainers/bwa:0.7.17--h5bf99c6_8' }" input: tuple val(meta), path(reads) @@ -27,22 +16,23 @@ process BWA_ALN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.sai \\ \$INDEX \\ ${reads} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } else { @@ -50,22 +40,22 @@ process BWA_ALN { INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.1.sai \\ \$INDEX \\ ${reads[0]} bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.2.sai \\ \$INDEX \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/index/functions.nf b/modules/bwa/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/index/main.nf b/modules/bwa/index/main.nf index db1911cb..89102737 100644 --- a/modules/bwa/index/main.nf +++ b/modules/bwa/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bwa=0.7.17" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa:0.7.17--hed695b0_7" - } else { - container "quay.io/biocontainers/bwa:0.7.17--hed695b0_7" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwa:0.7.17--hed695b0_7' : + 'quay.io/biocontainers/bwa:0.7.17--hed695b0_7' }" input: path fasta @@ -26,17 +15,18 @@ process BWA_INDEX { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ mkdir bwa bwa \\ index \\ - $options.args \\ + $args \\ -p bwa/${fasta.baseName} \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/mem/functions.nf b/modules/bwa/mem/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/mem/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results 
-// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index b6a548d7..9a04ed63 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_MEM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads) @@ -27,22 +16,24 @@ process BWA_MEM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-R ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa mem \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwa/sampe/functions.nf b/modules/bwa/sampe/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/sampe/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index 38127793..2abd9335 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_SAMPE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads), path(sai) @@ -27,22 +16,23 @@ process BWA_SAMPE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-r ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa sampe \\ - $options.args \\ + $args \\ $read_group \\ \$INDEX \\ $sai \\ $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwa/samse/functions.nf b/modules/bwa/samse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/samse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 68fa95c7..56e9127f 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_SAMSE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads), path(sai) @@ -27,22 +16,23 @@ process BWA_SAMSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-r ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa samse \\ - $options.args \\ + $args \\ $read_group \\ \$INDEX \\ $sai \\ $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwamem2/index/functions.nf b/modules/bwamem2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwamem2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwamem2/index/main.nf b/modules/bwamem2/index/main.nf index 5732017f..e00538c9 100644 --- a/modules/bwamem2/index/main.nf +++ b/modules/bwamem2/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMEM2_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa-mem2:2.2.1--he513fc3_0" - } else { - container "quay.io/biocontainers/bwa-mem2:2.2.1--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwa-mem2:2.2.1--he513fc3_0' : + 'quay.io/biocontainers/bwa-mem2:2.2.1--he513fc3_0' }" input: path fasta @@ -26,16 +15,17 @@ process BWAMEM2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bwamem2 bwa-mem2 \\ index \\ - $options.args \\ + $args \\ $fasta -p bwamem2/${fasta} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + "${task.process}": + bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') END_VERSIONS """ } diff --git a/modules/bwamem2/mem/functions.nf b/modules/bwamem2/mem/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwamem2/mem/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index f88d840f..7c238741 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMEM2_MEM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0" - } else { - container "quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' : + 'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' }" input: tuple val(meta), path(reads) @@ -27,23 +16,25 @@ process BWAMEM2_MEM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-R ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa-mem2 \\ mem \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + "${task.process}": + bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwameth/align/functions.nf b/modules/bwameth/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwameth/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index e15aba6d..06e9da44 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMETH_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwameth=0.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1" - } else { - container "quay.io/biocontainers/bwameth:0.2.2--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1' : + 'quay.io/biocontainers/bwameth:0.2.2--py_1' }" input: tuple val(meta), path(reads) @@ -27,7 +16,9 @@ process BWAMETH_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-R ${meta.read_group}" : "" """ INDEX=`find -L ${index} -name "*.bwameth.c2t" | sed 's/.bwameth.c2t//'` @@ -37,16 +28,16 @@ process BWAMETH_ALIGN { touch -c -- * bwameth.py \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ --reference \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") + "${task.process}": + bwameth: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") END_VERSIONS """ } diff --git a/modules/bwameth/index/functions.nf b/modules/bwameth/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwameth/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwameth/index/main.nf b/modules/bwameth/index/main.nf index 68fb33d4..f5b8ff59 100644 --- a/modules/bwameth/index/main.nf +++ b/modules/bwameth/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMETH_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bwameth=0.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1" - } else { - container "quay.io/biocontainers/bwameth:0.2.2--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1' : + 'quay.io/biocontainers/bwameth:0.2.2--py_1' }" input: path fasta, stageAs: "bwameth/*" @@ -26,12 +15,13 @@ process BWAMETH_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bwameth.py index $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") + "${task.process}": + bwameth: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") END_VERSIONS """ } diff --git a/modules/cat/cat/functions.nf b/modules/cat/cat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cat/cat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions 
= initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index dac301cb..0c087270 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CAT_CAT { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "conda-forge::pigz=2.3.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pigz:2.3.4" - } else { - container "quay.io/biocontainers/pigz:2.3.4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pigz:2.3.4' : + 'quay.io/biocontainers/pigz:2.3.4' }" input: path files_in @@ -26,6 +15,8 @@ process CAT_CAT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' def file_list = files_in.collect { it.toString() } if (file_list.size > 1) { @@ -39,16 +30,16 @@ process CAT_CAT { def in_zip = file_list[0].endsWith('.gz') def out_zip = file_out.endsWith('.gz') def command1 = (in_zip && !out_zip) ? 'zcat' : 'cat' - def command2 = (!in_zip && out_zip) ? "| pigz -c -p $task.cpus $options.args2" : '' + def command2 = (!in_zip && out_zip) ? 
"| pigz -c -p $task.cpus $args2" : '' """ $command1 \\ - $options.args \\ + $args \\ ${file_list.join(' ')} \\ $command2 \\ > $file_out cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/cat/fastq/functions.nf b/modules/cat/fastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cat/fastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index 538915a7..b6be93b0 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CAT_FASTQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'merged_fastq', meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: tuple val(meta), path(reads) @@ -26,7 +15,8 @@ process CAT_FASTQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def readList = reads.collect{ it.toString() } if (meta.single_end) { if (readList.size > 1) { @@ -34,8 +24,8 @@ process CAT_FASTQ { cat ${readList.sort().join(' ')} > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') + "${task.process}": + cat: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') END_VERSIONS """ } @@ -49,8 +39,8 @@ process CAT_FASTQ { cat ${read2.sort().join(' ')} > ${prefix}_2.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') + "${task.process}": + cat: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/cellranger/mkref/functions.nf b/modules/cellranger/mkref/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cellranger/mkref/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cellranger/mkref/main.nf b/modules/cellranger/mkref/main.nf index 22ad66ba..c5d83ac9 100644 --- a/modules/cellranger/mkref/main.nf +++ b/modules/cellranger/mkref/main.nf @@ -1,15 +1,6 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CELLRANGER_MKREF { tag 'mkref' label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } if (params.enable_conda) { exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." @@ -19,22 +10,24 @@ process CELLRANGER_MKREF { input: path fasta path gtf - val(reference_name) + val reference_name output: - path "versions.yml" , emit: versions path "${reference_name}", emit: reference + path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - cellranger mkref \\ - --genome=${reference_name} \\ - --fasta=${fasta} \\ - --genes=${gtf} + cellranger \\ + mkref \\ + --genome=$reference_name \\ + --fasta=$fasta \\ + --genes=$gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) END_VERSIONS """ } diff --git a/modules/checkm/lineagewf/functions.nf b/modules/checkm/lineagewf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/checkm/lineagewf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/checkm/lineagewf/main.nf b/modules/checkm/lineagewf/main.nf index e655e5f5..119ee491 100644 --- a/modules/checkm/lineagewf/main.nf +++ b/modules/checkm/lineagewf/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CHECKM_LINEAGEWF { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::checkm-genome=1.1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/checkm-genome:1.1.3--py_1" - } else { - container "quay.io/biocontainers/checkm-genome:1.1.3--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/checkm-genome:1.1.3--py_1' : + 'quay.io/biocontainers/checkm-genome:1.1.3--py_1' }" input: tuple val(meta), path(fasta) @@ -28,7 +17,8 @@ process CHECKM_LINEAGEWF { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ checkm \\ lineage_wf \\ @@ -37,13 +27,13 @@ process CHECKM_LINEAGEWF { --tab_table \\ --pplacer_threads $task.cpus \\ -x $fasta_ext \\ - $options.args \\ + $args \\ . 
\\ $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( checkm 2>&1 | grep '...:::' | sed 's/.*CheckM v//;s/ .*//' ) + "${task.process}": + checkm: \$( checkm 2>&1 | grep '...:::' | sed 's/.*CheckM v//;s/ .*//' ) END_VERSIONS """ } diff --git a/modules/chromap/chromap/functions.nf b/modules/chromap/chromap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/chromap/chromap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index 9826eed1..f6686cf2 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1' // No version information printed +def VERSION = '0.1' // Version information not provided by tool on CLI process CHROMAP_CHROMAP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::chromap=0.1 bioconda::samtools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } else { - container "quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' }" input: tuple val(meta), path(reads) @@ -37,34 +26,37 @@ process CHROMAP_CHROMAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args_list = args.tokenize() - def file_extension = options.args.contains("--SAM") ? 'sam' : options.args.contains("--TagAlign")? 'tagAlign' : options.args.contains("--pairs")? 'pairs' : 'bed' + def file_extension = args.contains("--SAM") ? 'sam' : args.contains("--TagAlign")? 'tagAlign' : args.contains("--pairs")? 
'pairs' : 'bed' if (barcodes) { - args << "-b ${barcodes.join(',')}" + args_list << "-b ${barcodes.join(',')}" if (whitelist) { - args << "--barcode-whitelist $whitelist" + args_list << "--barcode-whitelist $whitelist" } } if (chr_order) { - args << "--chr-order $chr_order" + args_list << "--chr-order $chr_order" } if (pairs_chr_order){ - args << "--pairs-natural-chr-order $pairs_chr_order" + args_list << "--pairs-natural-chr-order $pairs_chr_order" } - def final_args = args.join(' ') + def final_args = args_list.join(' ') def compression_cmds = "gzip ${prefix}.${file_extension}" - if (options.args.contains("--SAM")) { + if (args.contains("--SAM")) { compression_cmds = """ - samtools view $options.args2 -@ ${task.cpus} -bh \\ + samtools view $args2 -@ $task.cpus -bh \\ -o ${prefix}.bam ${prefix}.${file_extension} rm ${prefix}.${file_extension} """ } if (meta.single_end) { """ - chromap ${final_args} \\ + chromap \\ + $final_args \\ -t $task.cpus \\ -x $index \\ -r $fasta \\ @@ -74,13 +66,14 @@ process CHROMAP_CHROMAP { $compression_cmds cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION END_VERSIONS """ } else { """ - chromap ${final_args} \\ + chromap \\ + $final_args \\ -t $task.cpus \\ -x $index \\ -r $fasta \\ @@ -91,8 +84,8 @@ process CHROMAP_CHROMAP { $compression_cmds cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION END_VERSIONS """ } diff --git a/modules/chromap/index/functions.nf b/modules/chromap/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/chromap/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index efe85733..cafeca2f 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = 0.1 // No version information printed +def VERSION = '0.1' // Version information not provided by tool on CLI process CHROMAP_INDEX { tag '$fasta' label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::chromap=0.1 bioconda::samtools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } else { - container "quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' }" input: path fasta @@ -28,18 +17,19 @@ process CHROMAP_INDEX { path "versions.yml", emit: versions script: - def prefix = fasta.baseName + def args = task.ext.args ?: '' + def prefix = fasta.baseName """ chromap \\ -i \\ - $options.args \\ + $args \\ -t $task.cpus \\ -r $fasta \\ -o ${prefix}.index cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/clonalframeml/functions.nf b/modules/clonalframeml/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/clonalframeml/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/clonalframeml/main.nf b/modules/clonalframeml/main.nf index f99f944b..60eaad12 100644 --- a/modules/clonalframeml/main.nf +++ b/modules/clonalframeml/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CLONALFRAMEML { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::clonalframeml=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/clonalframeml:1.12--h7d875b9_1" - } else { - container "quay.io/biocontainers/clonalframeml:1.12--h7d875b9_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/clonalframeml:1.12--h7d875b9_1' : + 'quay.io/biocontainers/clonalframeml:1.12--h7d875b9_1' }" input: tuple val(meta), path(newick), path(msa) @@ -31,17 +20,18 @@ process CLONALFRAMEML { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ClonalFrameML \\ $newick \\ <(gzip -cdf $msa) \\ $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(ClonalFrameML -version 2>&1) | sed 's/^.*ClonalFrameML v//' ) + "${task.process}": + clonalframeml: \$( echo \$(ClonalFrameML -version 2>&1) | sed 's/^.*ClonalFrameML v//' ) END_VERSIONS """ } diff --git a/modules/cmseq/polymut/functions.nf b/modules/cmseq/polymut/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cmseq/polymut/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cmseq/polymut/main.nf b/modules/cmseq/polymut/main.nf index 4c061e26..18bb8c59 100644 --- a/modules/cmseq/polymut/main.nf +++ b/modules/cmseq/polymut/main.nf @@ -1,23 +1,13 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.0.4' +def VERSION = '1.0.4' // Version information not provided by tool on CLI process CMSEQ_POLYMUT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cmseq=1.0.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cmseq:1.0.4--pyhb7b1952_0" - } else { - container "quay.io/biocontainers/cmseq:1.0.4--pyhb7b1952_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cmseq:1.0.4--pyhb7b1952_0' : + 'quay.io/biocontainers/cmseq:1.0.4--pyhb7b1952_0' }" input: tuple val(meta), path(bam), path(bai), path(gff), path(fasta) @@ -27,20 +17,21 @@ process CMSEQ_POLYMUT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def fasta_refid = fasta ? "-c $fasta" : "" def sortindex = bai ? 
"" : "--sortindex" """ polymut.py \\ - $options.args \\ + $args \\ $sortindex \\ $fasta_refid \\ --gff_file $gff \\ $bam > ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) + "${task.process}": + cmseq: $VERSION END_VERSIONS """ } diff --git a/modules/cnvkit/batch/functions.nf b/modules/cnvkit/batch/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/cnvkit/batch/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cnvkit/batch/main.nf b/modules/cnvkit/batch/main.nf index 06ecaa40..811cb409 100644 --- a/modules/cnvkit/batch/main.nf +++ b/modules/cnvkit/batch/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CNVKIT_BATCH { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::cnvkit=0.9.9' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cnvkit:0.9.9--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/cnvkit:0.9.9--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cnvkit:0.9.9--pyhdfd78af_0' : + 'quay.io/biocontainers/cnvkit:0.9.9--pyhdfd78af_0' }" input: tuple val(meta), path(tumor), path(normal) @@ -32,18 +21,18 @@ process CNVKIT_BATCH { path "versions.yml" , emit: versions script: - normal_args = normal ? "--normal $normal" : "" - fasta_args = fasta ? "--fasta $fasta" : "" - reference_args = reference ? "--reference $reference" : "" + def args = task.ext.args ?: '' + def normal_args = normal ? "--normal $normal" : "" + def fasta_args = fasta ? "--fasta $fasta" : "" + def reference_args = reference ? "--reference $reference" : "" def target_args = "" - if (options.args.contains("--method wgs") || options.args.contains("-m wgs")) { + if (args.contains("--method wgs") || args.contains("-m wgs")) { target_args = targets ? 
"--targets $targets" : "" } else { target_args = "--targets $targets" } - """ cnvkit.py \\ batch \\ @@ -52,12 +41,12 @@ process CNVKIT_BATCH { $fasta_args \\ $reference_args \\ $target_args \\ - --processes ${task.cpus} \\ - $options.args + --processes $task.cpus \\ + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cnvkit.py version | sed -e "s/cnvkit v//g") + "${task.process}": + cnvkit: \$(cnvkit.py version | sed -e "s/cnvkit v//g") END_VERSIONS """ } diff --git a/modules/cooler/cload/functions.nf b/modules/cooler/cload/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/cload/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/cload/main.nf b/modules/cooler/cload/main.nf index ec0cad56..ed7a41a1 100644 --- a/modules/cooler/cload/main.nf +++ b/modules/cooler/cload/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_CLOAD { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(pairs), path(index) @@ -28,20 +17,21 @@ process COOLER_CLOAD { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def nproc = options.args.contains('pairix') || options.args.contains('tabix')? "--nproc ${task.cpus}" : '' + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def nproc = args.contains('pairix') || args.contains('tabix')? 
"--nproc $task.cpus" : '' """ cooler cload \\ - $options.args \\ + $args \\ $nproc \\ ${chromsizes}:${cool_bin} \\ $pairs \\ ${prefix}.${cool_bin}.cool cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/digest/functions.nf b/modules/cooler/digest/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/digest/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/digest/main.nf b/modules/cooler/digest/main.nf index 5728b649..9658ec31 100644 --- a/modules/cooler/digest/main.nf +++ b/modules/cooler/digest/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_DIGEST { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: path fasta @@ -28,17 +17,18 @@ process COOLER_DIGEST { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ cooler digest \\ - $options.args \\ + $args \\ -o "${fasta.baseName}_${enzyme.replaceAll(/[^0-9a-zA-Z]+/, '_')}.bed" \\ $chromsizes \\ $fasta \\ $enzyme cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/dump/functions.nf b/modules/cooler/dump/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/dump/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index 1ca11c7d..0836640e 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_DUMP { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(cool) @@ -27,17 +16,18 @@ process COOLER_DUMP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def suffix = resolution ? 
"::$resolution" : "" """ cooler dump \\ - $options.args \\ + $args \\ -o ${prefix}.bedpe \\ $cool$suffix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/dump/meta.yml b/modules/cooler/dump/meta.yml index 659b06a1..a9d1afd5 100644 --- a/modules/cooler/dump/meta.yml +++ b/modules/cooler/dump/meta.yml @@ -21,6 +21,9 @@ input: type: file description: Path to COOL file pattern: "*.{cool,mcool}" + - resolution: + type: value + description: Resolution output: - meta: diff --git a/modules/cooler/merge/functions.nf b/modules/cooler/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/merge/main.nf b/modules/cooler/merge/main.nf index b15439a4..0fed76c9 100644 --- a/modules/cooler/merge/main.nf +++ b/modules/cooler/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_MERGE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(cool) @@ -26,16 +15,17 @@ process COOLER_MERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ cooler merge \\ - $options.args \\ + $args \\ ${prefix}.cool \\ ${cool} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/zoomify/functions.nf b/modules/cooler/zoomify/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/zoomify/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/zoomify/main.nf b/modules/cooler/zoomify/main.nf index 3f1ed4e7..e61ca99d 100644 --- a/modules/cooler/zoomify/main.nf +++ b/modules/cooler/zoomify/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_ZOOMIFY { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(cool) @@ -26,17 +15,18 @@ process COOLER_ZOOMIFY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ cooler zoomify \\ - $options.args \\ + $args \\ -n $task.cpus \\ -o ${prefix}.mcool \\ $cool cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/csvtk/concat/functions.nf b/modules/csvtk/concat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/csvtk/concat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/csvtk/concat/main.nf b/modules/csvtk/concat/main.nf index 194b1e14..745a9ac4 100644 --- a/modules/csvtk/concat/main.nf +++ b/modules/csvtk/concat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CSVTK_CONCAT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::csvtk=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0' : + 'quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0' }" input: tuple val(meta), path(csv) @@ -28,14 +17,15 @@ process CSVTK_CONCAT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def delimiter = in_format == "tsv" ? "\t" : (in_format == "csv" ? "," : in_format) def out_delimiter = out_format == "tsv" ? "\t" : (out_format == "csv" ? "," : out_format) out_extension = out_format == "tsv" ? 
'tsv' : 'csv' """ csvtk \\ concat \\ - $options.args \\ + $args \\ --num-cpus $task.cpus \\ --delimiter "${delimiter}" \\ --out-delimiter "${out_delimiter}" \\ @@ -43,7 +33,7 @@ process CSVTK_CONCAT { $csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": csvtk: \$(echo \$( csvtk version | sed -e "s/csvtk v//g" )) END_VERSIONS """ diff --git a/modules/csvtk/split/functions.nf b/modules/csvtk/split/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/csvtk/split/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/csvtk/split/main.nf b/modules/csvtk/split/main.nf index 727e046a..89b44154 100644 --- a/modules/csvtk/split/main.nf +++ b/modules/csvtk/split/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CSVTK_SPLIT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::csvtk=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0' : + 'quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0' }" input: tuple val(meta), path(csv) @@ -28,7 +17,8 @@ process CSVTK_SPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def delimiter = in_format == "tsv" ? "--tabs" : (in_format == "csv" ? "--delimiter ',' " : in_format) def out_delimiter = out_format == "tsv" ? "--out-tabs" : (out_format == "csv" ? "--out-delimiter ',' " : out_format) out_extension = out_format == "tsv" ? 
'tsv' : 'csv' @@ -36,15 +26,15 @@ process CSVTK_SPLIT { sed -i.bak '/^##/d' $csv csvtk \\ split \\ - $options.args \\ + $args \\ --num-cpus $task.cpus \\ $delimiter \\ $out_delimiter \\ $csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$( csvtk version | sed -e 's/csvtk v//g' )) + "${task.process}": + csvtk: \$(echo \$( csvtk version | sed -e 's/csvtk v//g' )) END_VERSIONS """ } diff --git a/modules/custom/dumpsoftwareversions/functions.nf b/modules/custom/dumpsoftwareversions/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/custom/dumpsoftwareversions/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/custom/dumpsoftwareversions/main.nf b/modules/custom/dumpsoftwareversions/main.nf index faf2073f..934bb467 100644 --- a/modules/custom/dumpsoftwareversions/main.nf +++ b/modules/custom/dumpsoftwareversions/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container conda (params.enable_conda ? "bioconda::multiqc=1.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" input: path versions @@ -27,80 +16,6 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { path "versions.yml" , emit: versions script: - """ - #!/usr/bin/env python - - import yaml - import platform - from textwrap import dedent - - def _make_versions_html(versions): - html = [ - dedent( - '''\\ - - - - - - - - - - ''' - ) - ] - for process, tmp_versions in sorted(versions.items()): - html.append("") - for i, (tool, version) in enumerate(sorted(tmp_versions.items())): - html.append( - dedent( - f'''\\ - - - - - - ''' - ) - ) - html.append("") - html.append("
</table>
") - return "\\n".join(html) - - module_versions = {} - module_versions["${getProcessName(task.process)}"] = { - 'python': platform.python_version(), - 'yaml': yaml.__version__ - } - - with open("$versions") as f: - workflow_versions = yaml.load(f, Loader=yaml.BaseLoader) | module_versions - - workflow_versions["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version" - } - - versions_mqc = { - 'id': 'software_versions', - 'section_name': '${workflow.manifest.name} Software Versions', - 'section_href': 'https://github.com/${workflow.manifest.name}', - 'plot_type': 'html', - 'description': 'are collected at run time from the software output.', - 'data': _make_versions_html(workflow_versions) - } - - with open("software_versions.yml", 'w') as f: - yaml.dump(workflow_versions, f, default_flow_style=False) - with open("software_versions_mqc.yml", 'w') as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - - with open('versions.yml', 'w') as f: - yaml.dump(module_versions, f, default_flow_style=False) - """ + def args = task.ext.args ?: '' + template 'dumpsoftwareversions.py' } diff --git a/modules/custom/dumpsoftwareversions/meta.yml b/modules/custom/dumpsoftwareversions/meta.yml index c8310e35..5b5b8a60 100644 --- a/modules/custom/dumpsoftwareversions/meta.yml +++ b/modules/custom/dumpsoftwareversions/meta.yml @@ -31,3 +31,4 @@ output: authors: - "@drpatelh" + - "@grst" diff --git a/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py new file mode 100644 index 00000000..d1390392 --- /dev/null +++ b/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +import yaml +import platform +from textwrap import dedent + + +def _make_versions_html(versions): + html = [ + dedent( + """\\ + + + + + + + + + + """ + ) + ] + for process, tmp_versions in sorted(versions.items()): + html.append("") + for i, (tool, version) in enumerate(sorted(tmp_versions.items())): + html.append( + dedent( + f"""\\ + + + + + + """ + ) + ) + html.append("") + html.append("
</table>
") + return "\\n".join(html) + + +versions_this_module = {} +versions_this_module["${task.process}"] = { + "python": platform.python_version(), + "yaml": yaml.__version__, +} + +with open("$versions") as f: + versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module + +# aggregate versions by the module name (derived from fully-qualified process name) +versions_by_module = {} +for process, process_versions in versions_by_process.items(): + module = process.split(":")[-1] + try: + assert versions_by_module[module] == process_versions, ( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. " + ) + except KeyError: + versions_by_module[module] = process_versions + +versions_by_module["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version", +} + +versions_mqc = { + "id": "software_versions", + "section_name": "${workflow.manifest.name} Software Versions", + "section_href": "https://github.com/${workflow.manifest.name}", + "plot_type": "html", + "description": "are collected at run time from the software output.", + "data": _make_versions_html(versions_by_module), +} + +with open("software_versions.yml", "w") as f: + yaml.dump(versions_by_module, f, default_flow_style=False) +with open("software_versions_mqc.yml", "w") as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + +with open("versions.yml", "w") as f: + yaml.dump(versions_this_module, f, default_flow_style=False) diff --git a/modules/custom/getchromsizes/functions.nf b/modules/custom/getchromsizes/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/custom/getchromsizes/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/custom/getchromsizes/main.nf b/modules/custom/getchromsizes/main.nf index fb46986b..270b3f48 100644 --- a/modules/custom/getchromsizes/main.nf +++ b/modules/custom/getchromsizes/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CUSTOM_GETCHROMSIZES { tag "$fasta" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: path fasta @@ -27,13 +16,14 @@ process CUSTOM_GETCHROMSIZES { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ samtools faidx $fasta cut -f 1,2 ${fasta}.fai > ${fasta}.sizes cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + custom: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/cutadapt/functions.nf b/modules/cutadapt/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cutadapt/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cutadapt/main.nf b/modules/cutadapt/main.nf index 32faf2cf..f98113e8 100644 --- a/modules/cutadapt/main.nf +++ b/modules/cutadapt/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CUTADAPT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::cutadapt=3.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/cutadapt:3.4--py39h38f01e4_1' - } else { - container 'quay.io/biocontainers/cutadapt:3.4--py37h73a75cf_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cutadapt:3.4--py39h38f01e4_1' : + 'quay.io/biocontainers/cutadapt:3.4--py37h73a75cf_1' }" input: tuple val(meta), path(reads) @@ -27,18 +16,19 @@ process CUTADAPT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def trimmed = meta.single_end ? 
"-o ${prefix}.trim.fastq.gz" : "-o ${prefix}_1.trim.fastq.gz -p ${prefix}_2.trim.fastq.gz" """ cutadapt \\ --cores $task.cpus \\ - $options.args \\ + $args \\ $trimmed \\ $reads \\ > ${prefix}.cutadapt.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cutadapt --version) + "${task.process}": + cutadapt: \$(cutadapt --version) END_VERSIONS """ } diff --git a/modules/damageprofiler/functions.nf b/modules/damageprofiler/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/damageprofiler/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/damageprofiler/main.nf b/modules/damageprofiler/main.nf index 3800a305..da37909e 100644 --- a/modules/damageprofiler/main.nf +++ b/modules/damageprofiler/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DAMAGEPROFILER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::damageprofiler=1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/damageprofiler:1.1--hdfd78af_2" - } else { - container "quay.io/biocontainers/damageprofiler:1.1--hdfd78af_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/damageprofiler:1.1--hdfd78af_2' : + 'quay.io/biocontainers/damageprofiler:1.1--hdfd78af_2' }" input: tuple val(meta), path(bam) @@ -29,23 +18,21 @@ process DAMAGEPROFILER { path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta ? "-r $fasta" : "" def species_list = specieslist ? 
"-sf $specieslist" : "" - """ damageprofiler \\ - -i $bam \\ - -o $prefix/ \\ - $options.args \\ - $reference \\ - $species_list + -i $bam \\ + -o $prefix/ \\ + $args \\ + $reference \\ + $species_list cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(damageprofiler -v | sed 's/^DamageProfiler v//') + "${task.process}": + damageprofiler: \$(damageprofiler -v | sed 's/^DamageProfiler v//') END_VERSIONS """ - } diff --git a/modules/dastool/dastool/functions.nf b/modules/dastool/dastool/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dastool/dastool/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf index dff32294..b67ee993 100644 --- a/modules/dastool/dastool/main.nf +++ b/modules/dastool/dastool/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DASTOOL_DASTOOL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0" - } else { - container "quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0' : + 'quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0' }" input: tuple val(meta), path(contigs), path(bins) @@ -37,7 +27,8 @@ process DASTOOL_DASTOOL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def bin_list = bins instanceof List ? bins.join(",") : "$bins" def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond" def db_dir = db_directory ? 
"--db_directory $db_directory" : "" @@ -56,7 +47,7 @@ process DASTOOL_DASTOOL { $decompress_contigs DAS_Tool \\ - $options.args \\ + $args \\ $proteins_pred \\ $db_dir \\ $engine \\ @@ -66,8 +57,8 @@ process DASTOOL_DASTOOL { -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + "${task.process}": + dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) END_VERSIONS """ } diff --git a/modules/dastool/scaffolds2bin/functions.nf b/modules/dastool/scaffolds2bin/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dastool/scaffolds2bin/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dastool/scaffolds2bin/main.nf b/modules/dastool/scaffolds2bin/main.nf index b51a6e6e..78a06b6e 100644 --- a/modules/dastool/scaffolds2bin/main.nf +++ b/modules/dastool/scaffolds2bin/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DASTOOL_SCAFFOLDS2BIN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0" - } else { - container "quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0' : + 'quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -26,21 +16,22 @@ process DASTOOL_SCAFFOLDS2BIN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def file_extension = extension ? extension : "fasta" """ gunzip -f *.${file_extension}.gz Fasta_to_Scaffolds2Bin.sh \\ - $options.args \\ + $args \\ -i . 
\\ -e $file_extension \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + "${task.process}": + dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) END_VERSIONS """ } diff --git a/modules/dedup/functions.nf b/modules/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dedup/main.nf b/modules/dedup/main.nf index 62d720f6..60fc376e 100644 --- a/modules/dedup/main.nf +++ b/modules/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEDUP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dedup=0.12.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dedup:0.12.8--hdfd78af_1" - } else { - container "quay.io/biocontainers/dedup:0.12.8--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dedup:0.12.8--hdfd78af_1' : + 'quay.io/biocontainers/dedup:0.12.8--hdfd78af_1' }" input: tuple val(meta), path(bam) @@ -29,18 +18,19 @@ process DEDUP { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ dedup \\ -Xmx${task.memory.toGiga()}g \\ -i $bam \\ -o . 
\\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(dedup --version 2>&1) | tail -n 1 | sed 's/.* v//') + "${task.process}": + dedup: \$( echo \$(dedup --version 2>&1) | tail -n 1 | sed 's/.* v//') END_VERSIONS """ diff --git a/modules/deeptools/computematrix/functions.nf b/modules/deeptools/computematrix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/computematrix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/computematrix/main.nf b/modules/deeptools/computematrix/main.nf index 9fffdb8e..e39310f4 100644 --- a/modules/deeptools/computematrix/main.nf +++ b/modules/deeptools/computematrix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_COMPUTEMATRIX { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(bigwig) @@ -28,10 +17,11 @@ process DEEPTOOLS_COMPUTEMATRIX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ computeMatrix \\ - $options.args \\ + $args \\ --regionsFileName $bed \\ --scoreFileName $bigwig \\ --outFileName ${prefix}.computeMatrix.mat.gz \\ @@ -39,8 +29,8 @@ process DEEPTOOLS_COMPUTEMATRIX { --numberOfProcessors $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(computeMatrix --version | sed -e "s/computeMatrix //g") + "${task.process}": + deeptools: \$(computeMatrix --version | sed -e "s/computeMatrix //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotfingerprint/functions.nf b/modules/deeptools/plotfingerprint/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotfingerprint/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotfingerprint/main.nf b/modules/deeptools/plotfingerprint/main.nf index b2d167f9..aeb635ce 100644 --- a/modules/deeptools/plotfingerprint/main.nf +++ b/modules/deeptools/plotfingerprint/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTFINGERPRINT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(bams), path(bais) @@ -28,11 +17,12 @@ process DEEPTOOLS_PLOTFINGERPRINT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def extend = (meta.single_end && params.fragment_size > 0) ? 
"--extendReads ${params.fragment_size}" : '' """ plotFingerprint \\ - $options.args \\ + $args \\ $extend \\ --bamfiles ${bams.join(' ')} \\ --plotFile ${prefix}.plotFingerprint.pdf \\ @@ -41,8 +31,8 @@ process DEEPTOOLS_PLOTFINGERPRINT { --numberOfProcessors $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") + "${task.process}": + deeptools: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotheatmap/functions.nf b/modules/deeptools/plotheatmap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotheatmap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotheatmap/main.nf b/modules/deeptools/plotheatmap/main.nf index 19c243df..f981744e 100644 --- a/modules/deeptools/plotheatmap/main.nf +++ b/modules/deeptools/plotheatmap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTHEATMAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(matrix) @@ -27,17 +16,18 @@ process DEEPTOOLS_PLOTHEATMAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ plotHeatmap \\ - $options.args \\ + $args \\ --matrixFile $matrix \\ --outFileName ${prefix}.plotHeatmap.pdf \\ --outFileNameMatrix ${prefix}.plotHeatmap.mat.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") + "${task.process}": + deeptools: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotprofile/functions.nf b/modules/deeptools/plotprofile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotprofile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotprofile/main.nf b/modules/deeptools/plotprofile/main.nf index 3a196bd5..b32e04d3 100644 --- a/modules/deeptools/plotprofile/main.nf +++ b/modules/deeptools/plotprofile/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTPROFILE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(matrix) @@ -27,17 +16,18 @@ process DEEPTOOLS_PLOTPROFILE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ plotProfile \\ - $options.args \\ + $args \\ --matrixFile $matrix \\ --outFileName ${prefix}.plotProfile.pdf \\ --outFileNameData ${prefix}.plotProfile.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotProfile --version | sed -e "s/plotProfile //g") + "${task.process}": + deeptools: \$(plotProfile --version | sed -e "s/plotProfile //g") END_VERSIONS """ } diff --git a/modules/delly/call/functions.nf b/modules/delly/call/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/delly/call/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/delly/call/main.nf b/modules/delly/call/main.nf index 59979dc9..d4aa1adb 100644 --- a/modules/delly/call/main.nf +++ b/modules/delly/call/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DELLY_CALL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::delly=0.8.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/delly:0.8.7--he03298f_1" - } else { - container "quay.io/biocontainers/delly:0.8.7--he03298f_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/delly:0.8.7--he03298f_1' : + 'quay.io/biocontainers/delly:0.8.7--he03298f_1' }" input: tuple val(meta), path(bam), path(bai) @@ -29,18 +18,19 @@ process DELLY_CALL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ delly \\ call \\ - $options.args \\ + $args \\ -o ${prefix}.bcf \\ -g $fasta \\ $bam \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(delly --version 2>&1) | sed 's/^.*Delly version: v//; s/ using.*\$//') + "${task.process}": + delly: \$( echo \$(delly --version 2>&1) | sed 's/^.*Delly version: v//; s/ using.*\$//') END_VERSIONS """ } diff --git a/modules/diamond/blastp/functions.nf b/modules/diamond/blastp/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/blastp/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 6afc66c4..015be864 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_BLASTP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Dimaond is limited to v2.0.9 because there is not a // singularity version higher than this at the current time. conda (params.enable_conda ? "bioconda::diamond=2.0.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container "quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: tuple val(meta), path(fasta) @@ -29,7 +18,8 @@ process DIAMOND_BLASTP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` @@ -38,12 +28,12 @@ process DIAMOND_BLASTP { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ - $options.args \\ + $args \\ --out ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/diamond/blastx/functions.nf b/modules/diamond/blastx/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/blastx/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index db2953da..f4018aa9 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_BLASTX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Dimaond is limited to v2.0.9 because there is not a // singularity version higher than this at the current time. conda (params.enable_conda ? "bioconda::diamond=2.0.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container "quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: tuple val(meta), path(fasta) @@ -29,7 +18,8 @@ process DIAMOND_BLASTX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` @@ -38,12 +28,12 @@ process DIAMOND_BLASTX { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ - $options.args \\ + $args \\ --out ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/diamond/makedb/functions.nf b/modules/diamond/makedb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/makedb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/makedb/main.nf b/modules/diamond/makedb/main.nf index e4533f8f..cccfcce9 100644 --- a/modules/diamond/makedb/main.nf +++ b/modules/diamond/makedb/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_MAKEDB { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } // Dimaond is limited to v2.0.9 because there is not a // singularity version higher than this at the current time. conda (params.enable_conda ? 'bioconda::diamond=2.0.9' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: path fasta @@ -28,17 +17,18 @@ process DIAMOND_MAKEDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ diamond \\ makedb \\ --threads $task.cpus \\ --in $fasta \\ -d $fasta \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/dragonflye/functions.nf b/modules/dragonflye/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dragonflye/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } 
// Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dragonflye/main.nf b/modules/dragonflye/main.nf index f9dc9004..8ca98832 100644 --- a/modules/dragonflye/main.nf +++ b/modules/dragonflye/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DRAGONFLYE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dragonflye=1.0.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dragonflye:1.0.4--hdfd78af_0" - } else { - container "quay.io/biocontainers/dragonflye:1.0.4--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/dragonflye:1.0.4--hdfd78af_0' : + 'quay.io/biocontainers/dragonflye:1.0.4--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -30,18 +19,19 @@ process DRAGONFLYE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def memory = task.memory.toGiga() """ dragonflye \\ --reads ${reads} \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --ram $memory \\ --outdir ./ \\ --force cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dragonflye --version 2>&1 | sed 's/^.*dragonflye //' ) + "${task.process}": + dragonflye: \$(dragonflye --version 2>&1 | sed 's/^.*dragonflye //' ) END_VERSIONS """ } diff --git a/modules/dshbio/exportsegments/functions.nf b/modules/dshbio/exportsegments/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/exportsegments/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index ec471000..7cc5da22 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_EXPORTSEGMENTS { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gfa) @@ -26,17 +15,18 @@ process DSHBIO_EXPORTSEGMENTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ export-segments \\ - $options.args \\ + $args \\ -i $gfa \\ -o ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/filterbed/functions.nf b/modules/dshbio/filterbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/filterbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 9ad8ce8b..065d8bec 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_FILTERBED { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -26,17 +15,18 @@ process DSHBIO_FILTERBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ filter-bed \\ - $options.args \\ + $args \\ -i $bed \\ -o ${prefix}.bed.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/filtergff3/functions.nf b/modules/dshbio/filtergff3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/filtergff3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index bf729dbf..c738c95a 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_FILTERGFF3 { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gff3) @@ -26,17 +15,18 @@ process DSHBIO_FILTERGFF3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ filter-gff3 \\ - $options.args \\ + $args \\ -i $gff3 \\ -o ${prefix}.gff3.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/splitbed/functions.nf b/modules/dshbio/splitbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/splitbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 20e679f4..60b8b7a3 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_SPLITBED { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -26,18 +15,19 @@ process DSHBIO_SPLITBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ split-bed \\ - $options.args \\ + $args \\ -p $prefix \\ -s '.bed.gz' \\ -i $bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/splitgff3/functions.nf b/modules/dshbio/splitgff3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/splitgff3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index e0312a19..7ad2fd08 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_SPLITGFF3 { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gff3) @@ -26,18 +15,19 @@ process DSHBIO_SPLITGFF3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ split-gff3 \\ - $options.args \\ + $args \\ -p $prefix \\ -s '.gff3.gz' \\ -i $gff3 cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/ectyper/functions.nf b/modules/ectyper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ectyper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ectyper/main.nf b/modules/ectyper/main.nf index b5d8202d..5f458eb9 100644 --- a/modules/ectyper/main.nf +++ b/modules/ectyper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ECTYPER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ectyper=1.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ectyper:1.0.0--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/ectyper:1.0.0--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ectyper:1.0.0--pyhdfd78af_1' : + 'quay.io/biocontainers/ectyper:1.0.0--pyhdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -28,7 +17,8 @@ process ECTYPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def is_compressed = fasta.getName().endsWith(".gz") ? 
true : false def fasta_name = fasta.getName().replace(".gz", "") """ @@ -37,15 +27,16 @@ process ECTYPER { fi ectyper \\ - $options.args \\ + $args \\ --cores $task.cpus \\ --output ./ \\ --input $fasta_name + mv output.tsv ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ectyper --version 2>&1) | sed 's/.*ectyper //; s/ .*\$//') + "${task.process}": + ectyper: \$(echo \$(ectyper --version 2>&1) | sed 's/.*ectyper //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/emmtyper/functions.nf b/modules/emmtyper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/emmtyper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/emmtyper/main.nf b/modules/emmtyper/main.nf index 74624c1f..9cf98694 100644 --- a/modules/emmtyper/main.nf +++ b/modules/emmtyper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process EMMTYPER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::emmtyper=0.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/emmtyper:0.2.0--py_0" - } else { - container "quay.io/biocontainers/emmtyper:0.2.0--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/emmtyper:0.2.0--py_0' : + 'quay.io/biocontainers/emmtyper:0.2.0--py_0' }" input: tuple val(meta), path(fasta) @@ -26,16 +15,17 @@ process EMMTYPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ emmtyper \\ - $options.args \\ + $args \\ $fasta \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(emmtyper --version 2>&1) | sed 's/^.*emmtyper v//' ) + "${task.process}": + emmtyper: \$( echo \$(emmtyper --version 2>&1) | sed 's/^.*emmtyper v//' ) END_VERSIONS """ } diff --git a/modules/ensemblvep/functions.nf b/modules/ensemblvep/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ensemblvep/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index ad9c38a6..76cd9235 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -1,26 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) -params.use_cache = false -params.vep_tag = "" - process ENSEMBLVEP { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ensembl-vep=104.3" : null) - if (params.use_cache) { - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0" - } else { - container "quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0" - } + if (task.ext.use_cache) { + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" } else { - container "nfcore/vep:${params.vep_tag}" + container "nfcore/vep:${task.ext.vep_tag}" } input: @@ -36,15 +23,16 @@ process ENSEMBLVEP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - dir_cache = params.use_cache ? "\${PWD}/${cache}" : "/.vep" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def dir_cache = task.ext.use_cache ? 
"\${PWD}/${cache}" : "/.vep" """ mkdir $prefix vep \\ -i $vcf \\ -o ${prefix}.ann.vcf \\ - $options.args \\ + $args \\ --assembly $genome \\ --species $species \\ --cache \\ @@ -57,8 +45,8 @@ process ENSEMBLVEP { rm -rf $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(vep --help 2>&1) | sed 's/^.*Versions:.*ensembl-vep : //;s/ .*\$//') + "${task.process}": + ensemblvep: \$( echo \$(vep --help 2>&1) | sed 's/^.*Versions:.*ensembl-vep : //;s/ .*\$//') END_VERSIONS """ } diff --git a/modules/expansionhunter/functions.nf b/modules/expansionhunter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/expansionhunter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 845de15d..2ef00d17 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process EXPANSIONHUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::expansionhunter=4.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/expansionhunter:4.0.2--he785bd8_0" - } else { - container "quay.io/biocontainers/expansionhunter:4.0.2--he785bd8_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/expansionhunter:4.0.2--he785bd8_0' : + 'quay.io/biocontainers/expansionhunter:4.0.2--he785bd8_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,11 +17,12 @@ process EXPANSIONHUNTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def gender = (meta.gender == 'male' || meta.gender == 1 || meta.gender == 'XY') ? 
"male" : "female" """ ExpansionHunter \\ - $options.args \\ + $args \\ --reads $bam \\ --output-prefix $prefix \\ --reference $fasta \\ @@ -40,8 +30,8 @@ process EXPANSIONHUNTER { --sex $gender cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') + "${task.process}": + expansionhunter: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') END_VERSIONS """ } diff --git a/modules/fargene/functions.nf b/modules/fargene/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fargene/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf index f2afe4be..5bf1c604 100644 --- a/modules/fargene/main.nf +++ b/modules/fargene/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1' +def VERSION = '0.1' // Version information not provided by tool on CLI process FARGENE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fargene=0.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fargene:0.1--py27h21c881e_4" - } else { - container "quay.io/biocontainers/fargene:0.1--py27h21c881e_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fargene:0.1--py27h21c881e_4' : + 'quay.io/biocontainers/fargene:0.1--py27h21c881e_4' }" input: // input may be fasta (for genomes or longer contigs) or paired-end fastq (for metagenome), the latter in addition with --meta flag @@ -43,21 +32,22 @@ process FARGENE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gzip \\ -cdf $input \\ > unziped.fa | fargene \\ - $options.args \\ + $args \\ -p $task.cpus \\ -i unziped.fa \\ --hmm-model $hmm_model \\ -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + fargene: $VERSION END_VERSIONS """ } diff --git a/modules/fastani/functions.nf b/modules/fastani/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastani/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastani/main.nf b/modules/fastani/main.nf index 5c6366f9..7e3721bd 100644 --- a/modules/fastani/main.nf +++ b/modules/fastani/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTANI { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastani=1.32" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastani:1.32--he1c1bb9_0" - } else { - container "quay.io/biocontainers/fastani:1.32--he1c1bb9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastani:1.32--he1c1bb9_0' : + 'quay.io/biocontainers/fastani:1.32--he1c1bb9_0' }" input: tuple val(meta), path(query) @@ -27,7 +16,8 @@ process FASTANI { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.batch_input) { """ @@ -37,8 +27,8 @@ process FASTANI { -o ${prefix}.ani.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastANI --version 2>&1 | sed 's/version//;') + "${task.process}": + fastani: \$(fastANI --version 2>&1 | sed 's/version//;') END_VERSIONS """ } else { @@ -49,8 +39,8 @@ process FASTANI { -o ${prefix}.ani.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastANI --version 2>&1 | sed 's/version//;') + "${task.process}": + fastani: \$(fastANI --version 2>&1 | sed 's/version//;') END_VERSIONS """ } diff --git a/modules/fastp/functions.nf b/modules/fastp/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastp/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index e99540d5..05eb1e98 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::fastp=0.20.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/fastp:0.20.1--h8b12597_0' - } else { - container 'quay.io/biocontainers/fastp:0.20.1--h8b12597_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastp:0.20.1--h8b12597_0' : + 'quay.io/biocontainers/fastp:0.20.1--h8b12597_0' }" input: tuple val(meta), path(reads) @@ -33,8 +22,9 @@ process FASTP { tuple val(meta), path('*.merged.fastq.gz'), optional:true, emit: reads_merged script: + def args = task.ext.args ?: '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { def fail_fastq = save_trimmed_fail ? 
"--failed_out ${prefix}.fail.fastq.gz" : '' """ @@ -46,11 +36,11 @@ process FASTP { --json ${prefix}.fastp.json \\ --html ${prefix}.fastp.html \\ $fail_fastq \\ - $options.args \\ + $args \\ 2> ${prefix}.fastp.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastp --version 2>&1 | sed -e "s/fastp //g") + "${task.process}": + fastp: \$(fastp --version 2>&1 | sed -e "s/fastp //g") END_VERSIONS """ } else { @@ -70,12 +60,12 @@ process FASTP { $merge_fastq \\ --thread $task.cpus \\ --detect_adapter_for_pe \\ - $options.args \\ + $args \\ 2> ${prefix}.fastp.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastp --version 2>&1 | sed -e "s/fastp //g") + "${task.process}": + fastp: \$(fastp --version 2>&1 | sed -e "s/fastp //g") END_VERSIONS """ } diff --git a/modules/fastqc/functions.nf b/modules/fastqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastqc/main.nf b/modules/fastqc/main.nf index 9f6cfc55..673a00b8 100644 --- a/modules/fastqc/main.nf +++ b/modules/fastqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTQC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastqc=0.11.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0" - } else { - container "quay.io/biocontainers/fastqc:0.11.9--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : + 'quay.io/biocontainers/fastqc:0.11.9--0' }" input: tuple val(meta), path(reads) @@ -27,27 +16,28 @@ process FASTQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' // Add soft-links to original FastQs for consistent naming in pipeline - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}.fastq.gz + fastqc $args --threads $task.cpus ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) END_VERSIONS """ } else { """ [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! 
-f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz + fastqc $args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) END_VERSIONS """ } diff --git a/modules/fastqscan/functions.nf b/modules/fastqscan/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastqscan/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastqscan/main.nf b/modules/fastqscan/main.nf index 0106892f..768728f2 100644 --- a/modules/fastqscan/main.nf +++ b/modules/fastqscan/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTQSCAN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastq-scan=0.4.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastq-scan:0.4.4--h7d875b9_0" - } else { - container "quay.io/biocontainers/fastq-scan:0.4.4--h7d875b9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastq-scan:0.4.4--h7d875b9_0' : + 'quay.io/biocontainers/fastq-scan:0.4.4--h7d875b9_0' }" input: tuple val(meta), path(reads) @@ -26,15 +15,16 @@ process FASTQSCAN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ zcat $reads | \\ fastq-scan \\ - $options.args > ${prefix}.json + $args > ${prefix}.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fastq-scan -v 2>&1) | sed 's/^.*fastq-scan //' ) + "${task.process}": + fastqscan: \$( echo \$(fastq-scan -v 2>&1) | sed 's/^.*fastq-scan //' ) END_VERSIONS """ } diff --git a/modules/fasttree/functions.nf b/modules/fasttree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fasttree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fasttree/main.nf b/modules/fasttree/main.nf index 5f81d1f2..5e57aae9 100644 --- a/modules/fasttree/main.nf +++ b/modules/fasttree/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTTREE { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::fasttree=2.1.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fasttree:2.1.10--h516909a_4" - } else { - container "quay.io/biocontainers/fasttree:2.1.10--h516909a_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fasttree:2.1.10--h516909a_4' : + 'quay.io/biocontainers/fasttree:2.1.10--h516909a_4' }" input: path alignment @@ -25,16 +14,17 @@ process FASTTREE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ fasttree \\ - $options.args \\ + $args \\ -log fasttree_phylogeny.tre.log \\ -nt $alignment \\ > fasttree_phylogeny.tre cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fasttree -help 2>&1 | head -1 | sed 's/^FastTree \\([0-9\\.]*\\) .*\$/\\1/') + "${task.process}": + fasttree: \$(fasttree -help 2>&1 | head -1 | sed 's/^FastTree \\([0-9\\.]*\\) .*\$/\\1/') END_VERSIONS """ } diff --git a/modules/fgbio/callmolecularconsensusreads/functions.nf b/modules/fgbio/callmolecularconsensusreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/callmolecularconsensusreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = 
paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/callmolecularconsensusreads/main.nf b/modules/fgbio/callmolecularconsensusreads/main.nf index 23056b90..f514b69a 100644 --- a/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/modules/fgbio/callmolecularconsensusreads/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_CALLMOLECULARCONSENSUSREADS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0" - } else { - container "quay.io/biocontainers/fgbio:1.3.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0' : + 'quay.io/biocontainers/fgbio:1.3.0--0' }" input: tuple val(meta), path(bam) @@ -25,17 +15,18 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ fgbio \\ CallMolecularConsensusReads \\ -i $bam \\ - $options.args \\ + $args \\ -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/fastqtobam/functions.nf b/modules/fgbio/fastqtobam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/fastqtobam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/fastqtobam/main.nf b/modules/fgbio/fastqtobam/main.nf index 68a85508..40713d03 100644 --- a/modules/fgbio/fastqtobam/main.nf +++ b/modules/fgbio/fastqtobam/main.nf @@ -1,51 +1,39 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_FASTQTOBAM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0' : + 'quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0' }" input: tuple val(meta), path(reads) - val(read_structure) + val read_structure output: tuple val(meta), path("*_umi_converted.bam"), emit: umibam path "versions.yml" , emit: version script: - def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - mkdir tmpFolder + mkdir tmp fgbio \\ - --tmp-dir=${PWD}/tmpFolder \\ + --tmp-dir=${PWD}/tmp \\ FastqToBam \\ -i $reads \\ -o "${prefix}_umi_converted.bam" \\ --read-structures $read_structure \\ --sample $meta.id \\ --library $meta.id \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/groupreadsbyumi/functions.nf b/modules/fgbio/groupreadsbyumi/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/groupreadsbyumi/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/groupreadsbyumi/main.nf b/modules/fgbio/groupreadsbyumi/main.nf index 8e16f0a5..b35186a5 100644 --- a/modules/fgbio/groupreadsbyumi/main.nf +++ b/modules/fgbio/groupreadsbyumi/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_GROUPREADSBYUMI { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0' : + 'quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0' }" input: tuple val(meta), path(taggedbam) @@ -28,7 +17,8 @@ process FGBIO_GROUPREADSBYUMI { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mkdir tmp @@ -37,14 +27,14 @@ process FGBIO_GROUPREADSBYUMI { --tmp-dir=${PWD}/tmp \\ GroupReadsByUmi \\ -s $strategy \\ - ${options.args} \\ + $args \\ -i $taggedbam \\ -o ${prefix}_umi-grouped.bam \\ -f ${prefix}_umi_histogram.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/sortbam/functions.nf b/modules/fgbio/sortbam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/sortbam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/sortbam/main.nf b/modules/fgbio/sortbam/main.nf index 34e0b377..c2822548 100644 --- a/modules/fgbio/sortbam/main.nf +++ b/modules/fgbio/sortbam/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_SORTBAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0" - } else { - container "quay.io/biocontainers/fgbio:1.3.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0' : + 'quay.io/biocontainers/fgbio:1.3.0--0' }" input: tuple val(meta), path(bam) @@ -25,16 +15,17 @@ process FGBIO_SORTBAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ fgbio \\ SortBam \\ -i $bam \\ - $options.args \\ + $args \\ -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/filtlong/functions.nf b/modules/filtlong/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/filtlong/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/filtlong/main.nf b/modules/filtlong/main.nf index 6e82f112..10e147a6 100644 --- a/modules/filtlong/main.nf +++ b/modules/filtlong/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FILTLONG { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::filtlong=0.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/filtlong:0.2.1--h9a82719_0" - } else { - container "quay.io/biocontainers/filtlong:0.2.1--h9a82719_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/filtlong:0.2.1--h9a82719_0' : + 'quay.io/biocontainers/filtlong:0.2.1--h9a82719_0' }" input: tuple val(meta), path(shortreads), path(longreads) @@ -26,18 +15,19 @@ process FILTLONG { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def short_reads = meta.single_end ? 
"-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" """ filtlong \\ $short_reads \\ - $options.args \\ + $args \\ $longreads \\ | gzip -n > ${prefix}_lr_filtlong.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( filtlong --version | sed -e "s/Filtlong v//g" ) + "${task.process}": + filtlong: \$( filtlong --version | sed -e "s/Filtlong v//g" ) END_VERSIONS """ } diff --git a/modules/flash/functions.nf b/modules/flash/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/flash/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/flash/main.nf b/modules/flash/main.nf index 912b2961..23bd1892 100644 --- a/modules/flash/main.nf +++ b/modules/flash/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FLASH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::flash=1.2.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/flash:1.2.11--hed695b0_5" - } else { - container "quay.io/biocontainers/flash:1.2.11--hed695b0_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/flash:1.2.11--hed695b0_5' : + 'quay.io/biocontainers/flash:1.2.11--hed695b0_5' }" input: tuple val(meta), path(reads) @@ -25,18 +14,19 @@ process FLASH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ flash \\ - $options.args \\ + $args \\ -o ${prefix} \\ -z \\ ${reads[0]} \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(flash --version 2>&1) | sed 's/^.*FLASH v//; s/ .*\$//') + "${task.process}": + flash: \$(echo \$(flash --version 2>&1) | sed 's/^.*FLASH v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/freebayes/functions.nf b/modules/freebayes/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/freebayes/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/freebayes/main.nf b/modules/freebayes/main.nf index 0b23dc40..b9a63d02 100644 --- a/modules/freebayes/main.nf +++ b/modules/freebayes/main.nf @@ -1,38 +1,28 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FREEBAYES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::freebayes=1.3.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3" - } else { - container "quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3' : + 'quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3' }" input: tuple val(meta), path(input_1), path(input_1_index), path(input_2), path(input_2_index) path fasta - path fai + path fasta_fai path targets path samples path populations path cnv output: - tuple val(meta), path("*.vcf.gz") , emit: vcf - path "versions.yml" , emit: versions + tuple val(meta), path("*.vcf.gz"), emit: vcf + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input = input_2 ? "${input_1} ${input_2}" : "${input_1}" def targets_file = targets ? "--target ${targets}" : "" def samples_file = samples ? 
"--samples ${samples}" : "" @@ -42,20 +32,20 @@ process FREEBAYES { if (task.cpus > 1) { """ freebayes-parallel \\ - <(fasta_generate_regions.py ${fasta}.fai 10000) ${task.cpus} \\ + <(fasta_generate_regions.py $fasta_fai 10000) $task.cpus \\ -f $fasta \\ $targets_file \\ $samples_file \\ $populations_file \\ $cnv_file \\ - $options.args \\ + $args \\ $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + "${task.process}": + freebayes: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) END_VERSIONS """ @@ -67,14 +57,14 @@ process FREEBAYES { $samples_file \\ $populations_file \\ $cnv_file \\ - $options.args \\ + $args \\ $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + "${task.process}": + freebayes: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) END_VERSIONS """ } diff --git a/modules/freebayes/meta.yml b/modules/freebayes/meta.yml index 75d44826..abba1daa 100644 --- a/modules/freebayes/meta.yml +++ b/modules/freebayes/meta.yml @@ -36,10 +36,10 @@ input: type: file description: reference fasta file pattern: ".{fa,fa.gz,fasta,fasta.gz}" - - fai: + - fasta_fai: type: file description: reference fasta file index - pattern: "*.fai" + pattern: "*.{fa,fasta}.fai" - targets: type: file description: Optional - Limit analysis to targets listed in this BED-format FILE. @@ -55,8 +55,7 @@ input: - cnv: type: file description: | - A copy number map BED file, which has - either a sample-level ploidy: + A copy number map BED file, which has either a sample-level ploidy: sample_name copy_number or a region-specific format: seq_name start end sample_name copy_number diff --git a/modules/gatk4/applybqsr/functions.nf b/modules/gatk4/applybqsr/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/applybqsr/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index c89a4a4d..f93dd574 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_APPLYBQSR { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index), path(bqsr_table) @@ -30,7 +19,8 @@ process GATK4_APPLYBQSR { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" if (!task.memory) { log.info '[GATK ApplyBQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -45,11 +35,11 @@ process GATK4_APPLYBQSR { $interval \\ --tmp-dir . 
\\ -O ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/baserecalibrator/functions.nf b/modules/gatk4/baserecalibrator/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/baserecalibrator/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index ce6f5906..b422a798 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_BASERECALIBRATOR { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index) @@ -32,7 +21,8 @@ process GATK4_BASERECALIBRATOR { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def intervalsCommand = intervalsBed ? "-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') @@ -48,12 +38,12 @@ process GATK4_BASERECALIBRATOR { $sitesCommand \ $intervalsCommand \ --tmp-dir . 
\ - $options.args \ + $args \ -O ${prefix}.table cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/bedtointervallist/functions.nf b/modules/gatk4/bedtointervallist/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/bedtointervallist/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 7c06ccef..77819a0f 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_BEDTOINTERVALLIST { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -27,17 +16,18 @@ process GATK4_BEDTOINTERVALLIST { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk BedToIntervalList \\ -I $bed \\ -SD $sequence_dict \\ -O ${prefix}.interval_list \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/calculatecontamination/functions.nf b/modules/gatk4/calculatecontamination/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/calculatecontamination/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index 28dd7ccf..93a2ee57 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CALCULATECONTAMINATION { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(pileup), path(matched) @@ -28,7 +17,8 @@ process GATK4_CALCULATECONTAMINATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def matched_command = matched ? " -matched ${matched} " : '' def segment_command = segmentout ? 
" -segments ${prefix}.segmentation.table" : '' """ @@ -37,11 +27,11 @@ process GATK4_CALCULATECONTAMINATION { $matched_command \\ -O ${prefix}.contamination.table \\ $segment_command \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/createsequencedictionary/functions.nf b/modules/gatk4/createsequencedictionary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/createsequencedictionary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index db28e244..8d001856 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CREATESEQUENCEDICTIONARY { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: path fasta @@ -26,6 +15,7 @@ process GATK4_CREATESEQUENCEDICTIONARY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[GATK] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' 
@@ -37,11 +27,11 @@ process GATK4_CREATESEQUENCEDICTIONARY { CreateSequenceDictionary \\ --REFERENCE $fasta \\ --URI $fasta \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/createsomaticpanelofnormals/functions.nf b/modules/gatk4/createsomaticpanelofnormals/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/createsomaticpanelofnormals/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 49136256..9bc8d1d0 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CREATESOMATICPANELOFNORMALS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(genomicsdb) @@ -30,18 +19,19 @@ process GATK4_CREATESOMATICPANELOFNORMALS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk \\ CreateSomaticPanelOfNormals \\ -R $fasta \\ -V gendb://$genomicsdb \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/estimatelibrarycomplexity/functions.nf b/modules/gatk4/estimatelibrarycomplexity/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/estimatelibrarycomplexity/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index bfaeedbc..b0b35e42 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_ESTIMATELIBRARYCOMPLEXITY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(cram) @@ -29,7 +18,8 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def crams = cram.collect(){ x -> "-I ".concat(x.toString()) }.join(" ") def avail_mem = 3 @@ -44,11 +34,11 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { -O ${prefix}.metrics \ --REFERENCE_SEQUENCE ${fasta} \ --VALIDATION_STRINGENCY SILENT \ - --TMP_DIR . $options.args + --TMP_DIR . 
$args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/fastqtosam/functions.nf b/modules/gatk4/fastqtosam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/fastqtosam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index 5879618d..fc075735 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_FASTQTOSAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -26,18 +15,19 @@ process GATK4_FASTQTOSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_files = meta.single_end ? 
"-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" """ gatk FastqToSam \\ $read_files \\ -O ${prefix}.bam \\ -SM $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/filtermutectcalls/functions.nf b/modules/gatk4/filtermutectcalls/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/filtermutectcalls/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 6e10ff0f..7111db37 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_FILTERMUTECTCALLS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcf), path(tbi), path(stats), path(orientationbias), path(segmentation), path(contaminationfile), val(contaminationest) @@ -31,7 +20,8 @@ process GATK4_FILTERMUTECTCALLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def orientationbias_options = '' if (orientationbias) { @@ -55,11 +45,11 @@ process GATK4_FILTERMUTECTCALLS { $segmentation_options \\ $contamination_options \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/genomicsdbimport/functions.nf b/modules/gatk4/genomicsdbimport/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/genomicsdbimport/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index c5582563..110dbf4f 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_GENOMICSDBIMPORT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcf), path(tbi), path(intervalfile), val(intervalval), path(wspace) @@ -31,7 +20,8 @@ process GATK4_GENOMICSDBIMPORT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" // settings for running default create gendb mode inputs_command = input_map ? 
"--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V ')}" @@ -57,11 +47,11 @@ process GATK4_GENOMICSDBIMPORT { $inputs_command \\ $dir_command \\ $intervals_command \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/genotypegvcfs/functions.nf b/modules/gatk4/genotypegvcfs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/genotypegvcfs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf index 6fbbe663..ddb4a922 100644 --- a/modules/gatk4/genotypegvcfs/main.nf +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_GENOTYPEGVCFS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0' : + 'quay.io/biocontainers/gatk4:4.2.0.0--0' }" input: tuple val(meta), path(gvcf), path(gvcf_index) @@ -32,14 +21,15 @@ process GATK4_GENOTYPEGVCFS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? 
"$gvcf" : "gendb://$gvcf" """ gatk \\ GenotypeGVCFs \\ - $options.args \\ + $args \\ $interval_options \\ $dbsnp_options \\ -R $fasta \\ @@ -47,8 +37,8 @@ process GATK4_GENOTYPEGVCFS { -O ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/getpileupsummaries/functions.nf b/modules/gatk4/getpileupsummaries/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/getpileupsummaries/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index f08d4d91..0894e17b 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_GETPILEUPSUMMARIES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) @@ -29,7 +18,8 @@ process GATK4_GETPILEUPSUMMARIES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def sitesCommand = '' sitesCommand = sites ? 
" -L ${sites} " : " -L ${variants} " @@ -40,11 +30,11 @@ process GATK4_GETPILEUPSUMMARIES { -V $variants \\ $sitesCommand \\ -O ${prefix}.pileups.table \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/haplotypecaller/functions.nf b/modules/gatk4/haplotypecaller/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/haplotypecaller/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 1e540d17..418a2785 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_HAPLOTYPECALLER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index) @@ -33,7 +22,8 @@ process GATK4_HAPLOTYPECALLER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def interval_option = interval ? "-L ${interval}" : "" def dbsnp_option = dbsnp ? "-D ${dbsnp}" : "" def avail_mem = 3 @@ -51,12 +41,12 @@ process GATK4_HAPLOTYPECALLER { ${dbsnp_option} \\ ${interval_option} \\ -O ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --tmp-dir . 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/indexfeaturefile/functions.nf b/modules/gatk4/indexfeaturefile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/indexfeaturefile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/indexfeaturefile/main.nf b/modules/gatk4/indexfeaturefile/main.nf index 8f40a3e3..d33e030c 100644 --- a/modules/gatk4/indexfeaturefile/main.nf +++ b/modules/gatk4/indexfeaturefile/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_INDEXFEATUREFILE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0' : + 'quay.io/biocontainers/gatk4:4.2.0.0--0' }" input: tuple val(meta), path(feature_file) @@ -26,15 +15,16 @@ process GATK4_INDEXFEATUREFILE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ gatk \\ IndexFeatureFile \\ - $options.args \\ + $args \\ -I $feature_file cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/intervallisttools/functions.nf b/modules/gatk4/intervallisttools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/intervallisttools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 5da651b9..8e5b70e1 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_INTERVALLISTTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(interval_list) @@ -26,7 +15,8 @@ process GATK4_INTERVALLISTTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mkdir ${prefix}_split @@ -35,7 +25,7 @@ process GATK4_INTERVALLISTTOOLS { IntervalListTools \\ -I ${interval_list} \\ -O ${prefix}_split \\ - $options.args + $args python3 < versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/learnreadorientationmodel/functions.nf b/modules/gatk4/learnreadorientationmodel/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/learnreadorientationmodel/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index b8aee764..5e9700e3 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_LEARNREADORIENTATIONMODEL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(f1r2) @@ -26,7 +15,8 @@ process GATK4_LEARNREADORIENTATIONMODEL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def inputs_list = [] f1r2.each() { a -> inputs_list.add(" -I " + a) } """ @@ -34,11 +24,11 @@ process GATK4_LEARNREADORIENTATIONMODEL { LearnReadOrientationModel \\ ${inputs_list.join(' ')} \\ -O ${prefix}.tar.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/markduplicates/functions.nf b/modules/gatk4/markduplicates/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/markduplicates/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index e44f4bfc..9f0b46da 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MARKDUPLICATES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bams) @@ -28,7 +17,8 @@ process GATK4_MARKDUPLICATES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") def avail_mem = 3 if (!task.memory) { @@ -43,11 +33,11 @@ process GATK4_MARKDUPLICATES { --TMP_DIR . 
\\ --CREATE_INDEX true \\ --OUTPUT ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mergebamalignment/functions.nf b/modules/gatk4/mergebamalignment/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mergebamalignment/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 9c5fe26c..01effb0f 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MERGEBAMALIGNMENT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(aligned) @@ -29,18 +18,19 @@ process GATK4_MERGEBAMALIGNMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk MergeBamAlignment \\ ALIGNED=$aligned \\ UNMAPPED=$unmapped \\ R=$fasta \\ O=${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mergevcfs/functions.nf b/modules/gatk4/mergevcfs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mergevcfs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index 28073fcb..cbfc2e9d 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MERGEVCFS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcfs) @@ -28,7 +17,8 @@ process GATK4_MERGEVCFS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" // Make list of VCFs to merge def input = "" @@ -41,11 +31,11 @@ process GATK4_MERGEVCFS { $input \\ O=${prefix}.vcf.gz \\ $ref \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mutect2/functions.nf b/modules/gatk4/mutect2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mutect2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index e0e2661b..662b3f0c 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MUTECT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta) , path(input) , path(input_index) , val(which_norm) @@ -40,7 +29,8 @@ process GATK4_MUTECT2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def panels_command = '' def normals_command = '' @@ -70,11 +60,11 @@ process GATK4_MUTECT2 { ${normals_command} \\ ${panels_command} \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/revertsam/functions.nf b/modules/gatk4/revertsam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/revertsam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 7b5ee696..bca31a29 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_REVERTSAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,16 +15,17 @@ process GATK4_REVERTSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk RevertSam \\ I=$bam \\ O=${prefix}.reverted.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/samtofastq/functions.nf b/modules/gatk4/samtofastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/samtofastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 843c61ce..aa9a6b2d 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_SAMTOFASTQ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,18 @@ process GATK4_SAMTOFASTQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def output = meta.single_end ? 
"FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" """ gatk SamToFastq \\ I=$bam \\ $output \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/splitncigarreads/functions.nf b/modules/gatk4/splitncigarreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/splitncigarreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 26fb799d..32d36df9 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_SPLITNCIGARREADS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -29,17 +18,18 @@ process GATK4_SPLITNCIGARREADS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk SplitNCigarReads \\ -R $fasta \\ -I $bam \\ -O ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/variantfiltration/functions.nf b/modules/gatk4/variantfiltration/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/variantfiltration/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index e0f0727a..d5cc1eb3 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_VARIANTFILTRATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcf), path(vcf_tbi) @@ -30,7 +19,8 @@ process GATK4_VARIANTFILTRATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
@@ -42,11 +32,11 @@ process GATK4_VARIANTFILTRATION {
         -R $fasta \\
         -V $vcf \\
         -O ${prefix}.vcf.gz \\
-        $options.args
+        $args

     cat <<-END_VERSIONS > versions.yml
-    ${getProcessName(task.process)}:
-        ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//')
+    "${task.process}":
+        gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//')
     END_VERSIONS
     """
 }
diff --git a/modules/genmap/index/functions.nf b/modules/genmap/index/functions.nf
deleted file mode 100644
index 85628ee0..00000000
--- a/modules/genmap/index/functions.nf
+++ /dev/null
@@ -1,78 +0,0 @@
-//
-// Utility functions used in nf-core DSL2 module files
-//
-
-//
-// Extract name of software tool from process name using $task.process
-//
-def getSoftwareName(task_process) {
-    return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
-}
-
-//
-// Extract name of module from process name using $task.process
-//
-def getProcessName(task_process) {
-    return task_process.tokenize(':')[-1]
-}
-
-//
-// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
-//
-def initOptions(Map args) {
-    def Map options = [:]
-    options.args = args.args ?: ''
-    options.args2 = args.args2 ?: ''
-    options.args3 = args.args3 ?: ''
-    options.publish_by_meta = args.publish_by_meta ?: []
-    options.publish_dir = args.publish_dir ?: ''
-    options.publish_files = args.publish_files
-    options.suffix = args.suffix ?: ''
-    return options
-}
-
-//
-// Tidy up and join elements of a list to return a path string
-//
-def getPathFromList(path_list) {
-    def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
-    paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
-    return paths.join('/')
-}
-
-//
-// Function to save/publish module results
-//
-def saveFiles(Map args) {
-    def ioptions = initOptions(args.options)
-    def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
-
-    // Do not publish versions.yml unless running from pytest workflow
-    if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
-        return null
-    }
-    if (ioptions.publish_by_meta) {
-        def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
-        for (key in key_list) {
-            if (args.meta && key instanceof String) {
-                def path = key
-                if (args.meta.containsKey(key)) {
-                    path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
-                }
-                path = path instanceof String ? path : ''
-                path_list.add(path)
-            }
-        }
-    }
-    if (ioptions.publish_files instanceof Map) {
-        for (ext in ioptions.publish_files) {
-            if (args.filename.endsWith(ext.key)) {
-                def ext_list = path_list.collect()
-                ext_list.add(ext.value)
-                return "${getPathFromList(ext_list)}/$args.filename"
-            }
-        }
-    } else if (ioptions.publish_files == null) {
-        return "${getPathFromList(path_list)}/$args.filename"
-    }
-}
diff --git a/modules/genmap/index/main.nf b/modules/genmap/index/main.nf
index c79596f0..943f1a31 100644
--- a/modules/genmap/index/main.nf
+++ b/modules/genmap/index/main.nf
@@ -1,22 +1,11 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'
-
-params.options = [:]
-options = initOptions(params.options)
-
 process GENMAP_INDEX {
     tag '$fasta'
     label 'process_high'
-    publishDir "${params.outdir}",
-        mode: params.publish_dir_mode,
-        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) }

     conda (params.enable_conda ? "bioconda::genmap=1.3.0" : null)
-    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
-        container "https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1"
-    } else {
-        container "quay.io/biocontainers/genmap:1.3.0--h1b792b2_1"
-    }
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1' :
+        'quay.io/biocontainers/genmap:1.3.0--h1b792b2_1' }"

     input:
     path fasta
@@ -26,6 +15,7 @@ process GENMAP_INDEX {
     path "versions.yml" , emit: versions

     script:
+    def args = task.ext.args ?: ''
     """
     genmap \\
         index \\
@@ -33,8 +23,8 @@ process GENMAP_INDEX {
         -I genmap

     cat <<-END_VERSIONS > versions.yml
-    ${getProcessName(task.process)}:
-        ${getSoftwareName(task.process)}: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//')
+    "${task.process}":
+        genmap: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//')
     END_VERSIONS
     """
 }
diff --git a/modules/genmap/mappability/functions.nf b/modules/genmap/mappability/functions.nf
deleted file mode 100644
index 85628ee0..00000000
--- a/modules/genmap/mappability/functions.nf
+++ /dev/null
@@ -1,78 +0,0 @@
-//
-// Utility functions used in nf-core DSL2 module files
-//
-
-//
-// Extract name of software tool from process name using $task.process
-//
-def getSoftwareName(task_process) {
-    return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
-}
-
-//
-// Extract name of module from process name using $task.process
-//
-def getProcessName(task_process) {
-    return task_process.tokenize(':')[-1]
-}
-
-//
-// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
-//
-def initOptions(Map args) {
-    def Map options = [:]
-    options.args = args.args ?: ''
-    options.args2 = args.args2 ?: ''
-    options.args3 = args.args3 ?: ''
-    options.publish_by_meta = args.publish_by_meta ?: []
-    options.publish_dir = args.publish_dir ?: ''
-    options.publish_files = args.publish_files
-    options.suffix = args.suffix ?: ''
-    return options
-}
-
-//
-// Tidy up and join elements of a list to return a path string
-//
-def getPathFromList(path_list) {
-    def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
-    paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
-    return paths.join('/')
-}
-
-//
-// Function to save/publish module results
-//
-def saveFiles(Map args) {
-    def ioptions = initOptions(args.options)
-    def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
-
-    // Do not publish versions.yml unless running from pytest workflow
-    if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
-        return null
-    }
-    if (ioptions.publish_by_meta) {
-        def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
-        for (key in key_list) {
-            if (args.meta && key instanceof String) {
-                def path = key
-                if (args.meta.containsKey(key)) {
-                    path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
-                }
-                path = path instanceof String ? path : ''
-                path_list.add(path)
-            }
-        }
-    }
-    if (ioptions.publish_files instanceof Map) {
-        for (ext in ioptions.publish_files) {
-            if (args.filename.endsWith(ext.key)) {
-                def ext_list = path_list.collect()
-                ext_list.add(ext.value)
-                return "${getPathFromList(ext_list)}/$args.filename"
-            }
-        }
-    } else if (ioptions.publish_files == null) {
-        return "${getPathFromList(path_list)}/$args.filename"
-    }
-}
diff --git a/modules/genmap/mappability/main.nf b/modules/genmap/mappability/main.nf
index 4d858cbb..94083f14 100644
--- a/modules/genmap/mappability/main.nf
+++ b/modules/genmap/mappability/main.nf
@@ -1,22 +1,11 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'
-
-params.options = [:]
-options = initOptions(params.options)
-
 process GENMAP_MAPPABILITY {
     tag '$fasta'
     label 'process_high'
-    publishDir "${params.outdir}",
-        mode: params.publish_dir_mode,
-        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) }

     conda (params.enable_conda ? "bioconda::genmap=1.3.0" : null)
-    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
-        container "https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1"
-    } else {
-        container "quay.io/biocontainers/genmap:1.3.0--h1b792b2_1"
-    }
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+ 'https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1' : + 'quay.io/biocontainers/genmap:1.3.0--h1b792b2_1' }" input: path index @@ -28,16 +17,17 @@ process GENMAP_MAPPABILITY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ genmap \\ map \\ - $options.args \\ + $args \\ -I $index \\ -O mappability cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') + "${task.process}": + genmap: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') END_VERSIONS """ } diff --git a/modules/genrich/functions.nf b/modules/genrich/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/genrich/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/genrich/main.nf b/modules/genrich/main.nf index f34f9cd2..dfbebd3a 100644 --- a/modules/genrich/main.nf +++ b/modules/genrich/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GENRICH { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::genrich=0.6.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/genrich:0.6.1--h5bf99c6_1" - } else { - container "quay.io/biocontainers/genrich:0.6.1--h5bf99c6_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/genrich:0.6.1--h5bf99c6_1' : + 'quay.io/biocontainers/genrich:0.6.1--h5bf99c6_1' }" input: tuple val(meta), path(treatment_bam) @@ -36,7 +25,8 @@ process GENRICH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def control = control_bam ? "-c $control_bam" : '' def blacklist = blacklist_bed ? "-E $blacklist_bed" : "" def pvalues = save_pvalues ? "-f ${prefix}.pvalues.bedGraph" : "" @@ -44,7 +34,7 @@ process GENRICH { def bed = save_bed ? "-b ${prefix}.intervals.bed" : "" def duplicates = "" if (save_duplicates) { - if (options.args.contains('-r')) { + if (args.contains('-r')) { duplicates = "-R ${prefix}.duplicates.txt" } else { log.info '[Genrich] Duplicates can only be saved if they are filtered, defaulting to -r option (Remove PCR duplicates).' 
@@ -54,7 +44,7 @@ process GENRICH { """ Genrich \\ -t $treatment_bam \\ - $options.args \\ + $args \\ $control \\ $blacklist \\ -o ${prefix}.narrowPeak \\ @@ -65,8 +55,8 @@ process GENRICH { $control cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(Genrich --version 2>&1) | sed 's/^Genrich, version //; s/ .*\$//') + "${task.process}": + genrich: \$(echo \$(Genrich --version 2>&1) | sed 's/^Genrich, version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gffread/functions.nf b/modules/gffread/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gffread/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gffread/main.nf b/modules/gffread/main.nf index 4133ee08..d31f76f8 100644 --- a/modules/gffread/main.nf +++ b/modules/gffread/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GFFREAD { tag "$gff" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::gffread=0.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gffread:0.12.1--h8b12597_0" - } else { - container "quay.io/biocontainers/gffread:0.12.1--h8b12597_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gffread:0.12.1--h8b12597_0' : + 'quay.io/biocontainers/gffread:0.12.1--h8b12597_0' }" input: path gff @@ -26,15 +15,16 @@ process GFFREAD { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${gff.baseName}${options.suffix}" : "${gff.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${gff.baseName}${task.ext.suffix}" : "${gff.baseName}" """ gffread \\ $gff \\ - $options.args \\ + $args \\ -o ${prefix}.gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gffread --version 2>&1) + "${task.process}": + gffread: \$(gffread --version 2>&1) END_VERSIONS """ } diff --git a/modules/glnexus/functions.nf b/modules/glnexus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/glnexus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index 1384334f..e36729b2 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GLNEXUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::glnexus=1.4.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/glnexus:1.4.1--h40d77a6_0" - } else { - container "quay.io/biocontainers/glnexus:1.4.1--h40d77a6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/glnexus:1.4.1--h40d77a6_0' : + 'quay.io/biocontainers/glnexus:1.4.1--h40d77a6_0' }" input: tuple val(meta), path(gvcfs) @@ -26,7 +15,8 @@ process GLNEXUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" // Make list of GVCFs to merge def input = gvcfs.collect { it.toString() } @@ -40,13 +30,13 @@ process GLNEXUS { glnexus_cli \\ --threads $task.cpus \\ --mem-gbytes $avail_mem \\ - $options.args \\ + $args \\ ${input.join(' ')} \\ > ${prefix}.bcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') + "${task.process}": + glnexus: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/graphmap2/align/functions.nf b/modules/graphmap2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/graphmap2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/graphmap2/align/main.nf b/modules/graphmap2/align/main.nf index 831b0b3b..e0f2d4cd 100644 --- a/modules/graphmap2/align/main.nf +++ b/modules/graphmap2/align/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GRAPHMAP2_ALIGN { tag "$meta.id" label 'process_medium' tag "$meta.id" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::graphmap=0.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0" - } else { - container "quay.io/biocontainers/graphmap:0.6.3--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0' : + 'quay.io/biocontainers/graphmap:0.6.3--he513fc3_0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,8 @@ process GRAPHMAP2_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ graphmap2 \\ align \\ @@ -38,11 +28,11 @@ process GRAPHMAP2_ALIGN { -i $index \\ -d $reads \\ -o ${prefix}.sam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') + "${task.process}": + graphmap2: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/graphmap2/index/functions.nf b/modules/graphmap2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/graphmap2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/graphmap2/index/main.nf b/modules/graphmap2/index/main.nf index a8b03074..fffc7bcb 100644 --- a/modules/graphmap2/index/main.nf +++ b/modules/graphmap2/index/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GRAPHMAP2_INDEX { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:['']) } conda (params.enable_conda ? "bioconda::graphmap=0.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0" - } else { - container "quay.io/biocontainers/graphmap:0.6.3--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0' : + 'quay.io/biocontainers/graphmap:0.6.3--he513fc3_0' }" input: path fasta @@ -25,17 +14,18 @@ process GRAPHMAP2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ graphmap2 \\ align \\ -t $task.cpus \\ -I \\ - $options.args \\ + $args \\ -r $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') + "${task.process}": + graphmap2: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gstama/collapse/functions.nf b/modules/gstama/collapse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gstama/collapse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim 
whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gstama/collapse/main.nf b/modules/gstama/collapse/main.nf index 8fc7877f..d8a64113 100644 --- a/modules/gstama/collapse/main.nf +++ b/modules/gstama/collapse/main.nf @@ -1,23 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GSTAMA_COLLAPSE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gs-tama=1.0.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0" - } else { - container "quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0" - - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0' : + 'quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -37,17 +25,18 @@ process GSTAMA_COLLAPSE { tuple val(meta), path("*_variants.txt") , emit: variants, optional: true script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ tama_collapse.py \\ -s $bam \\ -f $fasta \\ -p ${prefix} \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' ) + "${task.process}": + gstama: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' ) END_VERSIONS """ } diff --git a/modules/gstama/merge/functions.nf b/modules/gstama/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gstama/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gstama/merge/main.nf b/modules/gstama/merge/main.nf index 37d685f6..4a8e829c 100644 --- a/modules/gstama/merge/main.nf +++ b/modules/gstama/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GSTAMA_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gs-tama=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0" - } else { - container "quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0' : + 'quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -30,17 +19,18 @@ process GSTAMA_MERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ tama_merge.py \\ -f $filelist \\ -d merge_dup \\ -p ${prefix} \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( tama_merge.py -version | head -n1 ) + "${task.process}": + gstama: \$( tama_merge.py -version | head -n1 ) END_VERSIONS """ } diff --git a/modules/gtdbtk/classifywf/functions.nf b/modules/gtdbtk/classifywf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gtdbtk/classifywf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gtdbtk/classifywf/main.nf b/modules/gtdbtk/classifywf/main.nf index fdcef76a..4a4b3a01 100644 --- a/modules/gtdbtk/classifywf/main.nf +++ b/modules/gtdbtk/classifywf/main.nf @@ -1,22 +1,12 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.5.0' // When using stubs for the GTDB database, the version info isn't printed. 
+def VERSION = '1.5.0' // Version information not provided by tool on CLI process GTDBTK_CLASSIFYWF { tag "${meta.assembler}-${meta.id}" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gtdbtk=1.5.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gtdbtk:1.5.0--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gtdbtk:1.5.0--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gtdbtk:1.5.0--pyhdfd78af_0' : + 'quay.io/biocontainers/gtdbtk:1.5.0--pyhdfd78af_0' }" input: tuple val(meta), path("bins/*") @@ -35,6 +25,7 @@ process GTDBTK_CLASSIFYWF { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def pplacer_scratch = params.gtdbtk_pplacer_scratch ? "--scratch_dir pplacer_tmp" : "" """ export GTDBTK_DATA_PATH="\${PWD}/database" @@ -43,7 +34,7 @@ process GTDBTK_CLASSIFYWF { fi gtdbtk classify_wf \\ - $options.args \\ + $args \\ --genome_dir bins \\ --prefix "gtdbtk.${meta.assembler}-${meta.id}" \\ --out_dir "\${PWD}" \\ @@ -58,8 +49,8 @@ process GTDBTK_CLASSIFYWF { mv gtdbtk.warnings.log "gtdbtk.${meta.assembler}-${meta.id}.warnings.log" cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gtdbtk --version -v 2>&1) | sed "s/gtdbtk: version //; s/ Copyright.*//") + "${task.process}": + gtdbtk: \$(echo \$(gtdbtk --version -v 2>&1) | sed "s/gtdbtk: version //; s/ Copyright.*//") END_VERSIONS """ @@ -76,8 +67,8 @@ process GTDBTK_CLASSIFYWF { touch gtdbtk.${meta.assembler}-${meta.id}.failed_genomes.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + gtdbtk: $VERSION END_VERSIONS """ } diff --git a/modules/gubbins/functions.nf b/modules/gubbins/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gubbins/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return 
paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gubbins/main.nf b/modules/gubbins/main.nf index da194906..b4c6dc23 100644 --- a/modules/gubbins/main.nf +++ b/modules/gubbins/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUBBINS { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::gubbins=3.0.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gubbins:3.0.0--py39h5bf99c6_0" - } else { - container "quay.io/biocontainers/gubbins:3.0.0--py39h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gubbins:3.0.0--py39h5bf99c6_0' : + 'quay.io/biocontainers/gubbins:3.0.0--py39h5bf99c6_0' }" input: path alignment @@ -33,14 +22,15 @@ process GUBBINS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ run_gubbins.py \\ --threads $task.cpus \\ - $options.args \\ + $args \\ $alignment cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(run_gubbins.py --version 2>&1) + "${task.process}": + gubbins: \$(run_gubbins.py --version 2>&1) END_VERSIONS """ } diff --git a/modules/gunc/downloaddb/functions.nf b/modules/gunc/downloaddb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunc/downloaddb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunc/downloaddb/main.nf b/modules/gunc/downloaddb/main.nf index af421608..430b862b 100644 --- a/modules/gunc/downloaddb/main.nf +++ b/modules/gunc/downloaddb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNC_DOWNLOADDB { tag '$db_name' label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::gunc=1.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0' : + 'quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0' }" input: val db_name @@ -26,12 +15,13 @@ process GUNC_DOWNLOADDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - gunc download_db . -db $db_name $options.args + gunc download_db . 
-db $db_name $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( gunc --version ) + "${task.process}": + gunc: \$( gunc --version ) END_VERSIONS """ } diff --git a/modules/gunc/run/functions.nf b/modules/gunc/run/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunc/run/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunc/run/main.nf b/modules/gunc/run/main.nf index f873a7df..6ac681ad 100644 --- a/modules/gunc/run/main.nf +++ b/modules/gunc/run/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNC_RUN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gunc=1.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0' : + 'quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -28,18 +17,19 @@ process GUNC_RUN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ gunc \\ run \\ --input_fasta $fasta \\ --db_file $db \\ --threads $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( gunc --version ) + "${task.process}": + gunc: \$( gunc --version ) END_VERSIONS """ } diff --git a/modules/gunzip/functions.nf b/modules/gunzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index 564fa99d..77a4e546 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNZIP { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: tuple val(meta), path(archive) @@ -26,16 +15,17 @@ process GUNZIP { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' gunzip = archive.toString() - '.gz' """ gunzip \\ -f \\ - $options.args \\ + $args \\ $archive cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') + "${task.process}": + gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/gunzip/test.txt.gz b/modules/gunzip/test.txt.gz deleted file mode 100644 index 381417cf643f1b5c547b57b251d71e6d5ce11e16..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 47 zcmb2|=HU3lo{`AFT#{N`qE}K;!r-m#=Xv^+o}cIW6JE^0nUR_|V;IhR&VMY%z`y_i DevlAG diff --git a/modules/hicap/functions.nf b/modules/hicap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hicap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix 
= args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hicap/main.nf b/modules/hicap/main.nf index fbc157b1..ed1d7797 100644 --- a/modules/hicap/main.nf +++ b/modules/hicap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HICAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hicap=1.0.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hicap:1.0.3--py_0" - } else { - container "quay.io/biocontainers/hicap:1.0.3--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hicap:1.0.3--py_0' : + 'quay.io/biocontainers/hicap:1.0.3--py_0' }" input: tuple val(meta), path(fasta) @@ -30,7 +19,8 @@ process HICAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def database_args = database_dir ? "--database_dir ${database_dir}" : "" def model_args = model_fp ? "--model_fp ${model_fp}" : "" def is_compressed = fasta.getName().endsWith(".gz") ? 
true : false @@ -43,13 +33,13 @@ process HICAP { --query_fp $fasta_name \\ $database_args \\ $model_args \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -o ./ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( hicap --version 2>&1 ) | sed 's/^.*hicap //' ) + "${task.process}": + hicap: \$( echo \$( hicap --version 2>&1 ) | sed 's/^.*hicap //' ) END_VERSIONS """ } diff --git a/modules/hifiasm/functions.nf b/modules/hifiasm/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hifiasm/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hifiasm/main.nf b/modules/hifiasm/main.nf index 9dfc9618..7fc857f1 100644 --- a/modules/hifiasm/main.nf +++ b/modules/hifiasm/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HIFIASM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hifiasm=0.15.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hifiasm:0.15.4--h2e03b76_0" - } else { - container "quay.io/biocontainers/hifiasm:0.15.4--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hifiasm:0.15.4--h2e03b76_0' : + 'quay.io/biocontainers/hifiasm:0.15.4--h2e03b76_0' }" input: tuple val(meta), path(reads) @@ -37,11 +26,12 @@ process HIFIASM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (use_parental_kmers) { """ hifiasm \\ - $options.args \\ + $args \\ -o ${prefix}.asm \\ -t $task.cpus \\ -1 $paternal_kmer_dump \\ @@ -49,21 +39,21 @@ process HIFIASM { $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hifiasm --version 2>&1) + "${task.process}": + hifiasm: \$(hifiasm --version 2>&1) END_VERSIONS """ } else { // Phasing with Hi-C data is not supported yet """ hifiasm \\ - $options.args \\ + $args \\ -o ${prefix}.asm \\ -t $task.cpus \\ $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hifiasm --version 2>&1) + "${task.process}": + hifiasm: \$(hifiasm --version 2>&1) END_VERSIONS """ } diff --git a/modules/hisat2/align/functions.nf b/modules/hisat2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 9b73216b..0c5f4134 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hisat2=2.2.0 bioconda::samtools=1.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0" - } else { - container "quay.io/biocontainers/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0' : + 'quay.io/biocontainers/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0' }" input: tuple val(meta), path(reads) @@ -26,14 +15,14 @@ process HISAT2_ALIGN { path splicesites output: - tuple val(meta), path("*.bam"), emit: bam - tuple val(meta), path("*.log"), emit: summary - path "versions.yml" , emit: versions - + tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*.log") , emit: summary tuple val(meta), path("*fastq.gz"), optional:true, emit: fastq + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -55,12 +44,12 @@ process HISAT2_ALIGN { --threads $task.cpus \\ $seq_center \\ $unaligned \\ - $options.args \\ + $args \\ | samtools view -bS -F 4 -F 256 - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ @@ -80,7 +69,7 @@ process HISAT2_ALIGN { $unaligned \\ --no-mixed \\ --no-discordant \\ - $options.args \\ + $args \\ | samtools view -bS -F 4 -F 8 -F 256 - > ${prefix}.bam if [ -f ${prefix}.unmapped.fastq.1.gz ]; then @@ -91,8 +80,8 @@ process HISAT2_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/hisat2/build/functions.nf b/modules/hisat2/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/build/main.nf b/modules/hisat2/build/main.nf index 015f6f59..4e8cd02b 100644 --- a/modules/hisat2/build/main.nf +++ b/modules/hisat2/build/main.nf @@ -1,25 +1,14 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_BUILD { tag "$fasta" label 'process_high' label 'process_high_memory' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::hisat2=2.2.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3" - } else { - container "quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3' : + 'quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3' }" input: path fasta @@ -31,6 +20,7 @@ process HISAT2_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 0 if (!task.memory) { log.info "[HISAT2 index build] Available memory not known - defaulting to 0. Specify process memory requirements to change this." @@ -52,7 +42,6 @@ process HISAT2_BUILD { log.info "[HISAT2 index build] Less than ${hisat2_build_memory} GB available, so NOT using splice sites and exons to build HISAT2 index." log.info "[HISAT2 index build] Use --hisat2_build_memory [small number] to skip this check." 
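// This HISAT2_BUILD hunk only threads splice-site and exon annotation into hisat2-build
// when enough memory is available; otherwise it logs the two messages shown here and
// builds a plain FASTA index. A minimal standalone sketch of that gating idea, assuming
// Groovy semantics inside the script block; the 200 GB threshold and the exact variable
// wiring are illustrative and not copied from this module:

def avail_mem           = task.memory ? task.memory.toGiga() : 0   // GB known to Nextflow, 0 if unset
def hisat2_build_memory = 200                                      // placeholder threshold in GB
def use_annotation      = avail_mem >= hisat2_build_memory
def ss   = use_annotation ? "--ss $splicesites"                : ''
def exon = use_annotation ? "--exon ${gtf.baseName}.exons.txt" : ''
// $ss and $exon are then interpolated into the hisat2-build command that follows,
// alongside $args from task.ext.args.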
} - """ mkdir hisat2 $extract_exons @@ -60,13 +49,13 @@ process HISAT2_BUILD { -p $task.cpus \\ $ss \\ $exon \\ - $options.args \\ + $args \\ $fasta \\ hisat2/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION END_VERSIONS """ } diff --git a/modules/hisat2/extractsplicesites/functions.nf b/modules/hisat2/extractsplicesites/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/extractsplicesites/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/extractsplicesites/main.nf b/modules/hisat2/extractsplicesites/main.nf index 1c8b7830..302c35f1 100644 --- a/modules/hisat2/extractsplicesites/main.nf +++ b/modules/hisat2/extractsplicesites/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_EXTRACTSPLICESITES { tag "$gtf" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::hisat2=2.2.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3" - } else { - container "quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3' : + 'quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3' }" input: path gtf @@ -28,11 +17,12 @@ process HISAT2_EXTRACTSPLICESITES { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ hisat2_extract_splice_sites.py $gtf > ${gtf.baseName}.splice_sites.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION END_VERSIONS """ } diff --git a/modules/hmmcopy/gccounter/functions.nf b/modules/hmmcopy/gccounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmcopy/gccounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmcopy/gccounter/main.nf b/modules/hmmcopy/gccounter/main.nf index 6e7bc11f..36666095 100644 --- a/modules/hmmcopy/gccounter/main.nf +++ b/modules/hmmcopy/gccounter/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1.1' +def VERSION = '0.1.1' // Version information not provided by tool on CLI process HMMCOPY_GCCOUNTER { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5" - } else { - container "quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5' : + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5' }" input: path fasta @@ -27,14 +16,15 @@ process HMMCOPY_GCCOUNTER { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ gcCounter \\ - $options.args \\ + $args \\ ${fasta} > ${fasta.baseName}.gc.wig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hmmcopy: $VERSION END_VERSIONS """ } diff --git a/modules/hmmcopy/readcounter/functions.nf b/modules/hmmcopy/readcounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmcopy/readcounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmcopy/readcounter/main.nf b/modules/hmmcopy/readcounter/main.nf index 9e3e72a7..6cd776a1 100644 --- a/modules/hmmcopy/readcounter/main.nf +++ b/modules/hmmcopy/readcounter/main.nf @@ -1,42 +1,32 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1.1' +def VERSION = '0.1.1' // Version information not provided by tool on CLI process HMMCOPY_READCOUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5" - } else { - container "quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5' : + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5' }" input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(bam), path(bai) output: - tuple val(meta), path("*.wig"), emit: wig - path "versions.yml" , emit: versions + tuple val(meta), path("*.wig"), emit: wig + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ readCounter \\ - $options.args \\ + $args \\ ${bam} > ${prefix}.wig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hmmcopy: $VERSION END_VERSIONS """ } diff --git a/modules/hmmer/hmmalign/functions.nf b/modules/hmmer/hmmalign/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmer/hmmalign/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmer/hmmalign/main.nf b/modules/hmmer/hmmalign/main.nf index b4292feb..a25871e8 100644 --- a/modules/hmmer/hmmalign/main.nf +++ b/modules/hmmer/hmmalign/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HMMER_HMMALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hmmer=3.3.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1" - } else { - container "quay.io/biocontainers/hmmer:3.3.2--h1b792b2_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1' : + 'quay.io/biocontainers/hmmer:3.3.2--h1b792b2_1' }" input: tuple val(meta), path(fasta) @@ -27,18 +16,19 @@ process HMMER_HMMALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def fastacmd = fasta.getExtension() == 'gz' ? 
"gunzip -c $fasta" : "cat $fasta" """ $fastacmd | \\ hmmalign \\ - $options.args \\ + $args \\ $hmm \\ - | gzip -c > ${meta.id}.sthlm.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hmmalign -h | grep -o '^# HMMER [0-9.]*' | sed 's/^# HMMER *//') + "${task.process}": + hmmer: \$(hmmalign -h | grep -o '^# HMMER [0-9.]*' | sed 's/^# HMMER *//') END_VERSIONS """ } diff --git a/modules/homer/annotatepeaks/functions.nf b/modules/homer/annotatepeaks/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/annotatepeaks/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/annotatepeaks/main.nf b/modules/homer/annotatepeaks/main.nf index 1714644b..321dbc7c 100644 --- a/modules/homer/annotatepeaks/main.nf +++ b/modules/homer/annotatepeaks/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_ANNOTATEPEAKS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::homer=4.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(peak) @@ -30,19 +19,20 @@ process HOMER_ANNOTATEPEAKS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ annotatePeaks.pl \\ $peak \\ $fasta \\ - $options.args \\ + $args \\ -gtf $gtf \\ -cpu $task.cpus \\ > ${prefix}.annotatePeaks.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/findpeaks/functions.nf b/modules/homer/findpeaks/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/findpeaks/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf index 2e0b6db9..a39fe753 100644 --- a/modules/homer/findpeaks/main.nf +++ b/modules/homer/findpeaks/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_FINDPEAKS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(tagDir) @@ -28,17 +17,18 @@ process HOMER_FINDPEAKS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ findPeaks \\ $tagDir \\ - $options.args \\ + $args \\ -o ${prefix}.peaks.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/maketagdirectory/functions.nf b/modules/homer/maketagdirectory/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/maketagdirectory/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index 4f531e82..44490d50 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_MAKETAGDIRECTORY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(bed) @@ -26,20 +15,21 @@ process HOMER_MAKETAGDIRECTORY { output: tuple val(meta), path("tag_dir"), emit: tagdir - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ makeTagDirectory \\ tag_dir \\ - $options.args \\ + $args \\ $bed \\ -genome $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/makeucscfile/functions.nf b/modules/homer/makeucscfile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/makeucscfile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf index c56da24b..8a0e3f37 100644 --- a/modules/homer/makeucscfile/main.nf +++ b/modules/homer/makeucscfile/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_MAKEUCSCFILE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(tagDir) @@ -28,16 +17,17 @@ process HOMER_MAKEUCSCFILE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ makeUCSCfile \\ $tagDir \\ - -o auto - $options.args + -o auto \\ + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/idr/functions.nf b/modules/idr/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/idr/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/idr/main.nf b/modules/idr/main.nf index 006826ac..44b07be4 100644 --- a/modules/idr/main.nf +++ b/modules/idr/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IDR { tag "$prefix" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::idr=2.0.4.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/idr:2.0.4.2--py39hcbe4a3b_5" - } else { - container "quay.io/biocontainers/idr:2.0.4.2--py38h9af456f_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/idr:2.0.4.2--py39hcbe4a3b_5' : + 'quay.io/biocontainers/idr:2.0.4.2--py38h9af456f_5' }" input: path peaks @@ -30,6 +19,7 @@ process IDR { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' if (peaks.toList().size < 2) { log.error "[ERROR] idr needs at least two replicates only one provided." } @@ -46,11 +36,11 @@ process IDR { --output-file $idr_vals \\ --log-output-file $log_file \\ --plot \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(idr --version 2>&1) | sed 's/^.*IDR //; s/ .*\$//') + "${task.process}": + idr: \$(echo \$(idr --version 2>&1) | sed 's/^.*IDR //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/imputeme/vcftoprs/functions.nf b/modules/imputeme/vcftoprs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/imputeme/vcftoprs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/imputeme/vcftoprs/main.nf b/modules/imputeme/vcftoprs/main.nf index a3ce7e3c..0c8c1952 100644 --- a/modules/imputeme/vcftoprs/main.nf +++ b/modules/imputeme/vcftoprs/main.nf @@ -1,23 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' - - -params.options = [:] -options = initOptions(params.options) - process IMPUTEME_VCFTOPRS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "YOUR-TOOL-HERE" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/imputeme/vv1.0.7_cv1/imputeme_vv1.0.7_cv1.img" - } else { - container "biocontainers/imputeme:vv1.0.7_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/imputeme/vv1.0.7_cv1/imputeme_vv1.0.7_cv1.img' : + 'biocontainers/imputeme:vv1.0.7_cv1' }" input: tuple val(meta), path(vcf) @@ -27,14 +15,15 @@ process IMPUTEME_VCFTOPRS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ #!/usr/bin/env Rscript - #Set configuration - either from options.args or from defaults + #Set configuration - either from args or from defaults source("/imputeme/code/impute-me/functions.R") - if(file.exists('$options.args')){ - set_conf("set_from_file",'$options.args') + if(file.exists('$args')){ + set_conf("set_from_file",'$args') }else{ set_conf("set_from_file", "/imputeme/code/impute-me/template/nextflow_default_configuration.R") } diff --git a/modules/iqtree/functions.nf b/modules/iqtree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/iqtree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/iqtree/main.nf b/modules/iqtree/main.nf index bec879df..54a6486d 100644 --- a/modules/iqtree/main.nf +++ b/modules/iqtree/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IQTREE { tag "$alignment" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::iqtree=2.1.4_beta' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/iqtree:2.1.4_beta--hdcc8f71_0" - } else { - container "quay.io/biocontainers/iqtree:2.1.4_beta--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/iqtree:2.1.4_beta--hdcc8f71_0' : + 'quay.io/biocontainers/iqtree:2.1.4_beta--hdcc8f71_0' }" input: path alignment @@ -27,20 +16,21 @@ process IQTREE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def fconst_args = constant_sites ? 
"-fconst $constant_sites" : '' def memory = task.memory.toString().replaceAll(' ', '') """ iqtree \\ $fconst_args \\ - $options.args \\ + $args \\ -s $alignment \\ -nt AUTO \\ -ntmax $task.cpus \\ -mem $memory \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(iqtree -version 2>&1) | sed 's/^IQ-TREE multicore version //;s/ .*//') + "${task.process}": + iqtree: \$(echo \$(iqtree -version 2>&1) | sed 's/^IQ-TREE multicore version //;s/ .*//') END_VERSIONS """ } diff --git a/modules/ismapper/functions.nf b/modules/ismapper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ismapper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ismapper/main.nf b/modules/ismapper/main.nf index 20d3d5b7..4a33261b 100644 --- a/modules/ismapper/main.nf +++ b/modules/ismapper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISMAPPER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ismapper=2.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ismapper:2.0.2--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/ismapper:2.0.2--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ismapper:2.0.2--pyhdfd78af_1' : + 'quay.io/biocontainers/ismapper:2.0.2--pyhdfd78af_1' }" input: tuple val(meta), path(reads), path(reference), path(query) @@ -26,10 +15,11 @@ process ISMAPPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ismap \\ - $options.args \\ + $args \\ --t $task.cpus \\ --output_dir results \\ --queries $query \\ @@ -37,8 +27,8 @@ process ISMAPPER { --reads $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( ismap --version 2>&1 ) | sed 's/^.*ismap //' ) + "${task.process}": + ismapper: \$( echo \$( ismap --version 2>&1 ) | sed 's/^.*ismap //' ) END_VERSIONS """ } diff --git a/modules/isoseq3/cluster/functions.nf b/modules/isoseq3/cluster/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/isoseq3/cluster/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/isoseq3/cluster/main.nf b/modules/isoseq3/cluster/main.nf index df005706..27d5c3d8 100644 --- a/modules/isoseq3/cluster/main.nf +++ b/modules/isoseq3/cluster/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISOSEQ3_CLUSTER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::isoseq3=3.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0" - } else { - container "quay.io/biocontainers/isoseq3:3.4.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0' : + 'quay.io/biocontainers/isoseq3:3.4.0--0' }" input: tuple val(meta), path(bam) @@ -27,28 +16,27 @@ process ISOSEQ3_CLUSTER { tuple val(meta), path("*.transcripts.cluster") , emit: cluster tuple val(meta), path("*.transcripts.cluster_report.csv"), emit: cluster_report tuple val(meta), path("*.transcripts.transcriptset.xml") , emit: transcriptset - path "versions.yml" , emit: versions - tuple val(meta), path("*.transcripts.hq.bam") , optional: true, emit: hq_bam tuple val(meta), path("*.transcripts.hq.bam.pbi") , optional: true, emit: hq_pbi tuple val(meta), path("*.transcripts.lq.bam") , optional: true, emit: lq_bam tuple val(meta), path("*.transcripts.lq.bam.pbi") , optional: true, emit: lq_pbi tuple val(meta), path("*.transcripts.singletons.bam") , optional: true, emit: singletons_bam tuple val(meta), path("*.transcripts.singletons.bam.pbi"), optional: true, emit: singletons_pbi - + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ isoseq3 \\ cluster \\ $bam \\ ${prefix}.transcripts.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - isoseq3 cluster: \$( isoseq3 cluster --version|sed 's/isoseq cluster //g'|sed 's/ (.*//g' ) + "${task.process}": + isoseq3: \$( isoseq3 cluster --version|sed 's/isoseq cluster //g'|sed 's/ (.*//g' ) END_VERSIONS """ } diff --git a/modules/isoseq3/refine/functions.nf b/modules/isoseq3/refine/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/isoseq3/refine/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/isoseq3/refine/main.nf b/modules/isoseq3/refine/main.nf index 5a45eb2d..5bde2f8f 100644 --- a/modules/isoseq3/refine/main.nf +++ b/modules/isoseq3/refine/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISOSEQ3_REFINE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::isoseq3=3.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0" - } else { - container "quay.io/biocontainers/isoseq3:3.4.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0' : + 'quay.io/biocontainers/isoseq3:3.4.0--0' }" input: tuple val(meta), path(bam) @@ -31,19 +20,20 @@ process ISOSEQ3_REFINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ isoseq3 \\ refine \\ -j $task.cpus \\ - $options.args \\ + $args \\ $bam \\ $primers \\ ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( isoseq3 refine --version|sed 's/isoseq refine //'|sed 's/ (commit.\\+//' ) + "${task.process}": + isoseq3: \$( isoseq3 refine --version|sed 's/isoseq refine //'|sed 's/ (commit.\\+//' ) END_VERSIONS """ } diff --git a/modules/ivar/consensus/functions.nf b/modules/ivar/consensus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/consensus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/consensus/main.nf b/modules/ivar/consensus/main.nf index 33fa11f7..4a657756 100644 --- a/modules/ivar/consensus/main.nf +++ b/modules/ivar/consensus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_CONSENSUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam) @@ -29,21 +18,23 @@ process IVAR_CONSENSUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def save_mpileup = params.save_mpileup ? 
"tee ${prefix}.mpileup |" : "" """ samtools mpileup \\ --reference $fasta \\ - $options.args2 \\ + $args2 \\ $bam | \\ $save_mpileup \\ ivar consensus \\ - $options.args \\ + $args \\ -p $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ivar/trim/functions.nf b/modules/ivar/trim/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/trim/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/trim/main.nf b/modules/ivar/trim/main.nf index 6cf8171c..35798123 100644 --- a/modules/ivar/trim/main.nf +++ b/modules/ivar/trim/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_TRIM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,18 +17,19 @@ process IVAR_TRIM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ivar trim \\ - $options.args \\ + $args \\ -i $bam \\ -b $bed \\ -p $prefix \\ > ${prefix}.ivar.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ivar/variants/functions.nf b/modules/ivar/variants/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/variants/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/variants/main.nf b/modules/ivar/variants/main.nf index d079a8e9..ba791307 100644 --- a/modules/ivar/variants/main.nf +++ b/modules/ivar/variants/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_VARIANTS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam) @@ -29,24 +18,26 @@ process IVAR_VARIANTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" def features = params.gff ? 
"-g $gff" : "" """ samtools mpileup \\ - $options.args2 \\ + $args2 \\ --reference $fasta \\ $bam | \\ $save_mpileup \\ ivar variants \\ - $options.args \\ + $args \\ $features \\ -r $fasta \\ -p $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/jupyternotebook/functions.nf b/modules/jupyternotebook/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/jupyternotebook/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/jupyternotebook/main.nf b/modules/jupyternotebook/main.nf index 2d8ad92f..02f1947f 100644 --- a/modules/jupyternotebook/main.nf +++ b/modules/jupyternotebook/main.nf @@ -1,29 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' include { dump_params_yml; indent_code_block } from "./parametrize" -params.options = [:] -options = initOptions(params.options) -params.parametrize = true -params.implicit_params = true -params.meta_params = true - process JUPYTERNOTEBOOK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } //NB: You likely want to override this with a container containing all required //dependencies for your analysis. The container at least needs to contain the //ipykernel, jupytext, papermill and nbconvert Python packages. conda (params.enable_conda ? "ipykernel=6.0.3 jupytext=1.11.4 nbconvert=6.1.0 papermill=2.3.3 matplotlib=3.4.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963%3A879972fc8bdc81ee92f2bce3b4805d89a772bf84-0" - } else { - container "quay.io/biocontainers/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963:879972fc8bdc81ee92f2bce3b4805d89a772bf84-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963%3A879972fc8bdc81ee92f2bce3b4805d89a772bf84-0' : + 'quay.io/biocontainers/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963:879972fc8bdc81ee92f2bce3b4805d89a772bf84-0' }" input: tuple val(meta), path(notebook) @@ -36,7 +23,11 @@ process JUPYTERNOTEBOOK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize + def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params + def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params // Dump parameters to yaml file. 
// Using a yaml file over using the CLI params because @@ -44,14 +35,14 @@ process JUPYTERNOTEBOOK { // * allows to pass nested maps instead of just single values def params_cmd = "" def render_cmd = "" - if (params.parametrize) { + if (parametrize) { nb_params = [:] - if (params.implicit_params) { + if (implicit_params) { nb_params["cpus"] = task.cpus nb_params["artifact_dir"] = "artifacts" nb_params["input_dir"] = "./" } - if (params.meta_params) { + if (meta_params) { nb_params["meta"] = meta } nb_params += parameters @@ -71,10 +62,10 @@ process JUPYTERNOTEBOOK { mkdir artifacts # Set parallelism for BLAS/MKL etc. to avoid over-booking of resources - export MKL_NUM_THREADS="${task.cpus}" - export OPENBLAS_NUM_THREADS="${task.cpus}" - export OMP_NUM_THREADS="${task.cpus}" - export NUMBA_NUM_THREADS="${task.cpus}" + export MKL_NUM_THREADS="$task.cpus" + export OPENBLAS_NUM_THREADS="$task.cpus" + export OMP_NUM_THREADS="$task.cpus" + export NUMBA_NUM_THREADS="$task.cpus" # Convert notebook to ipynb using jupytext, execute using papermill, convert using nbconvert jupytext --to notebook --output - --set-kernel - ${notebook} \\ @@ -82,7 +73,7 @@ process JUPYTERNOTEBOOK { | jupyter nbconvert --stdin --to html --output ${prefix}.html cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": jupytext: \$(jupytext --version) ipykernel: \$(python -c "import ipykernel; print(ipykernel.__version__)") nbconvert: \$(jupyter nbconvert --version) diff --git a/modules/kallisto/index/functions.nf b/modules/kallisto/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallisto/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallisto/index/main.nf b/modules/kallisto/index/main.nf index 96457b6d..4dc9c6d0 100644 --- a/modules/kallisto/index/main.nf +++ b/modules/kallisto/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTO_INDEX { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::kallisto=0.46.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kallisto:0.46.2--h4f7b962_1" - } else { - container "quay.io/biocontainers/kallisto:0.46.2--h4f7b962_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/kallisto:0.46.2--h4f7b962_1' : + 'quay.io/biocontainers/kallisto:0.46.2--h4f7b962_1' }" input: path fasta @@ -26,16 +15,17 @@ process KALLISTO_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ kallisto \\ index \\ - $options.args \\ + $args \\ -i kallisto \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') + "${task.process}": + kallisto: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/count/functions.nf b/modules/kallistobustools/count/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallistobustools/count/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index 8c705e51..00ca8971 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTOBUSTOOLS_COUNT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::kb-python=0.26.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0' : + 'quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0' }" input: tuple val(meta), path(reads) @@ -24,7 +13,7 @@ process KALLISTOBUSTOOLS_COUNT { path t2g path t1c path t2c - val workflow + val workflow_mode val technology output: @@ -32,7 +21,8 @@ process KALLISTOBUSTOOLS_COUNT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def cdna = t1c ? "-c1 $t1c" : '' def introns = t2c ? 
"-c2 $t2c" : '' """ @@ -43,16 +33,16 @@ process KALLISTOBUSTOOLS_COUNT { -g $t2g \\ $cdna \\ $introns \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ -x $technology \\ - $options.args \\ + $args \\ -o ${prefix}.count \\ ${reads[0]} \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/count/meta.yml b/modules/kallistobustools/count/meta.yml index bc2433bb..911697d2 100644 --- a/modules/kallistobustools/count/meta.yml +++ b/modules/kallistobustools/count/meta.yml @@ -39,9 +39,9 @@ input: type: file description: kb ref's c2 unspliced_t2c file pattern: "*.{introns_t2c.txt}" - - workflow: + - workflow_mode: type: value - description: String value defining worfklow to use, can be one of "standard", "lamanno", "nucleus" + description: String value defining workflow to use, can be one of "standard", "lamanno", "nucleus" pattern: "{standard,lamanno,nucleus,kite}" - technology: type: value diff --git a/modules/kallistobustools/ref/functions.nf b/modules/kallistobustools/ref/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallistobustools/ref/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallistobustools/ref/main.nf b/modules/kallistobustools/ref/main.nf index a8287498..1e496f67 100644 --- a/modules/kallistobustools/ref/main.nf +++ b/modules/kallistobustools/ref/main.nf @@ -1,27 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTOBUSTOOLS_REF { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::kb-python=0.26.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0' : + 'quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0' }" input: path fasta path gtf - val workflow + val workflow_mode output: path "versions.yml" , emit: versions @@ -33,20 +22,21 @@ process KALLISTOBUSTOOLS_REF { path "intron_t2c.txt" , optional:true, emit: intron_t2c script: - if (workflow == "standard") { + def args = task.ext.args ?: '' + if (workflow_mode == "standard") { """ kb \\ ref \\ -i kb_ref_out.idx \\ -g t2g.txt \\ -f1 cdna.fa \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ $fasta \\ $gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } else { @@ -59,13 +49,13 @@ process KALLISTOBUSTOOLS_REF { -f2 intron.fa \\ -c1 cdna_t2c.txt \\ -c2 intron_t2c.txt \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ $fasta \\ $gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/ref/meta.yml b/modules/kallistobustools/ref/meta.yml index 353b9c11..dcc78c66 100644 --- a/modules/kallistobustools/ref/meta.yml +++ b/modules/kallistobustools/ref/meta.yml @@ -21,9 +21,9 @@ input: type: file description: Genomic gtf file pattern: "*.{gtf,gtf.gz}" - - workflow: + - workflow_mode: type: value - description: String value defining worfklow to use, can be one of "standard", "lamanno", "nucleus" + description: String value defining workflow to use, can be one of "standard", 
"lamanno", "nucleus" pattern: "{standard,lamanno,nucleus}" output: diff --git a/modules/khmer/normalizebymedian/functions.nf b/modules/khmer/normalizebymedian/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/khmer/normalizebymedian/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/khmer/normalizebymedian/main.nf b/modules/khmer/normalizebymedian/main.nf index 234d172b..50b3d603 100644 --- a/modules/khmer/normalizebymedian/main.nf +++ b/modules/khmer/normalizebymedian/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KHMER_NORMALIZEBYMEDIAN { tag "${name}" label 'process_long' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::khmer=3.0.0a3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/khmer:3.0.0a3--py37haa7609a_2" - } else { - container "quay.io/biocontainers/khmer:3.0.0a3--py37haa7609a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/khmer:3.0.0a3--py37haa7609a_2' : + 'quay.io/biocontainers/khmer:3.0.0a3--py37haa7609a_2' }" input: path pe_reads @@ -28,22 +17,22 @@ process KHMER_NORMALIZEBYMEDIAN { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' pe_args = pe_reads ? "--paired" : "" se_args = se_reads ? "--unpaired-reads ${se_reads}" : "" files = pe_reads ? pe_reads : se_reads - """ normalize-by-median.py \\ -M ${task.memory.toGiga()}e9 \\ - --gzip ${options.args} \\ + --gzip $args \\ -o ${name}.fastq.gz \\ - ${pe_args} \\ - ${se_args} \\ - ${files} + $pe_args \\ + $se_args \\ + $files cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( normalize-by-median.py --version 2>&1 | grep ^khmer | sed 's/^khmer //' ) + "${task.process}": + khmer: \$( normalize-by-median.py --version 2>&1 | grep ^khmer | sed 's/^khmer //' ) END_VERSIONS """ } diff --git a/modules/kleborate/functions.nf b/modules/kleborate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kleborate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf index 5bb76ad0..b64a0c45 100644 --- a/modules/kleborate/main.nf +++ b/modules/kleborate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KLEBORATE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::kleborate=2.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kleborate:2.1.0--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/kleborate:2.1.0--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/kleborate:2.1.0--pyhdfd78af_1' : + 'quay.io/biocontainers/kleborate:2.1.0--pyhdfd78af_1' }" input: tuple val(meta), path(fastas) @@ -26,16 +15,17 @@ process KLEBORATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ kleborate \\ - $options.args \\ + $args \\ --outfile ${prefix}.results.txt \\ --assemblies *.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(kleborate --version | sed 's/Kleborate v//;')) + "${task.process}": + kleborate: \$( echo \$(kleborate --version | sed 's/Kleborate v//;')) END_VERSIONS """ } diff --git a/modules/kraken2/kraken2/functions.nf b/modules/kraken2/kraken2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kraken2/kraken2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index 0d4e5840..e5fb4b80 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KRAKEN2_KRAKEN2 { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::kraken2=2.1.1 conda-forge::pigz=2.6' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' - } else { - container 'quay.io/biocontainers/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' : + 'quay.io/biocontainers/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,8 @@ process KRAKEN2_KRAKEN2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def paired = meta.single_end ? "" : "--paired" def classified = meta.single_end ? "${prefix}.classified.fastq" : "${prefix}.classified#.fastq" def unclassified = meta.single_end ? "${prefix}.unclassified.fastq" : "${prefix}.unclassified#.fastq" @@ -42,14 +32,14 @@ process KRAKEN2_KRAKEN2 { --report ${prefix}.kraken2.report.txt \\ --gzip-compressed \\ $paired \\ - $options.args \\ + $args \\ $reads pigz -p $task.cpus *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//') + "${task.process}": + kraken2: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/krona/kronadb/main.nf b/modules/krona/kronadb/main.nf new file mode 100644 index 00000000..ca7fc3d3 --- /dev/null +++ b/modules/krona/kronadb/main.nf @@ -0,0 +1,27 @@ +def VERSION='2.7.1' // Version information not provided by tool on CLI + +process KRONA_KRONADB { + label 'process_low' + + conda (params.enable_conda ? "bioconda::krona=2.7.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/krona:2.7.1--pl526_5' : + 'quay.io/biocontainers/krona:2.7.1--pl526_5' }" + + input: + + output: + path 'taxonomy/taxonomy.tab', emit: db + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + ktUpdateTaxonomy.sh taxonomy + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + krona: $VERSION + END_VERSIONS + """ +} diff --git a/modules/kronatools/kronadb/meta.yml b/modules/krona/kronadb/meta.yml similarity index 95% rename from modules/kronatools/kronadb/meta.yml rename to modules/krona/kronadb/meta.yml index 5a637949..2a12aaaf 100644 --- a/modules/kronatools/kronadb/meta.yml +++ b/modules/krona/kronadb/meta.yml @@ -1,11 +1,11 @@ -name: kronatools_kronadb +name: krona_kronadb description: KronaTools Update Taxonomy downloads a taxonomy database keywords: - database - taxonomy - krona tools: - - kronatools: + - krona: description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. homepage: https://github.com/marbl/Krona/wiki/KronaTools documentation: https://github.com/marbl/Krona/wiki/Installing diff --git a/modules/krona/ktimporttaxonomy/main.nf b/modules/krona/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..bc79c98c --- /dev/null +++ b/modules/krona/ktimporttaxonomy/main.nf @@ -0,0 +1,30 @@ +def VERSION = '2.8' // Version information not provided by tool on CLI + +process KRONA_KTIMPORTTAXONOMY { + tag "${meta.id}" + label 'process_high' + + conda (params.enable_conda ? "bioconda::krona=2.8" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/krona:2.8--pl5262hdfd78af_2' : + 'quay.io/biocontainers/krona:2.8--pl5262hdfd78af_2' }" + + input: + tuple val(meta), path(report) + path "taxonomy/taxonomy.tab" + + output: + tuple val(meta), path ('*.html'), emit: html + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + ktImportTaxonomy "$report" -tax taxonomy + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + krona: $VERSION + END_VERSIONS + """ +} diff --git a/modules/kronatools/ktimporttaxonomy/meta.yml b/modules/krona/ktimporttaxonomy/meta.yml similarity index 95% rename from modules/kronatools/ktimporttaxonomy/meta.yml rename to modules/krona/ktimporttaxonomy/meta.yml index f37f2db4..b65919f8 100644 --- a/modules/kronatools/ktimporttaxonomy/meta.yml +++ b/modules/krona/ktimporttaxonomy/meta.yml @@ -1,4 +1,4 @@ -name: kronatools_ktimporttaxonomy +name: krona_ktimporttaxonomy description: KronaTools Import Taxonomy imports taxonomy classifications and produces an interactive Krona plot. keywords: - plot @@ -8,7 +8,7 @@ keywords: - visualisation - krona chart tools: - - kronatools: + - krona: description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. homepage: https://github.com/marbl/Krona/wiki/KronaTools documentation: http://manpages.ubuntu.com/manpages/impish/man1/ktImportTaxonomy.1.html @@ -24,7 +24,7 @@ input: e.g. [ id:'test'] - database: type: path - description: "Path to the taxonomy database downloaded by kronatools/kronadb" + description: "Path to the taxonomy database downloaded by krona/kronadb" - report: type: file description: "A tab-delimited file with taxonomy IDs and (optionally) query IDs, magnitudes, and scores. 
Query IDs are taken from column 1, taxonomy IDs from column 2, and scores from column 3. Lines beginning with # will be ignored." diff --git a/modules/kronatools/kronadb/functions.nf b/modules/kronatools/kronadb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kronatools/kronadb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kronatools/kronadb/main.nf b/modules/kronatools/kronadb/main.nf deleted file mode 100644 index 7dee12d0..00000000 --- a/modules/kronatools/kronadb/main.nf +++ /dev/null @@ -1,35 +0,0 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -process KRONATOOLS_KRONADB { - label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - - conda (params.enable_conda ? 
"bioconda::krona=2.7.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/krona:2.7.1--pl526_5" - } else { - container "quay.io/biocontainers/krona:2.7.1--pl526_5" - } - input: - - output: - path 'taxonomy/taxonomy.tab', emit: db - path "versions.yml" , emit: versions - - script: - def VERSION='2.7.1' - """ - ktUpdateTaxonomy.sh taxonomy - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: $VERSION - END_VERSIONS - """ -} diff --git a/modules/kronatools/ktimporttaxonomy/functions.nf b/modules/kronatools/ktimporttaxonomy/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kronatools/ktimporttaxonomy/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kronatools/ktimporttaxonomy/main.nf b/modules/kronatools/ktimporttaxonomy/main.nf deleted file mode 100644 index 893bc5b2..00000000 --- a/modules/kronatools/ktimporttaxonomy/main.nf +++ /dev/null @@ -1,39 +0,0 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -process KRONATOOLS_KTIMPORTTAXONOMY { - tag "${meta.id}" - label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - - conda (params.enable_conda ? "bioconda::krona=2.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/krona:2.8--pl5262hdfd78af_2" - } else { - container "quay.io/biocontainers/krona:2.8--pl5262hdfd78af_2" - } - - input: - tuple val(meta), path(report) - path "taxonomy/taxonomy.tab" - - output: - tuple val(meta), path ('*.html'), emit: html - path "versions.yml" , emit: versions - - script: - def VERSION='2.8' - """ - ktImportTaxonomy "$report" -tax taxonomy - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: $VERSION - END_VERSIONS - """ -} diff --git a/modules/last/dotplot/functions.nf b/modules/last/dotplot/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/dotplot/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/dotplot/main.nf b/modules/last/dotplot/main.nf index d02e98ad..51667378 100644 --- a/modules/last/dotplot/main.nf +++ b/modules/last/dotplot/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_DOTPLOT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -28,17 +17,18 @@ process LAST_DOTPLOT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ last-dotplot \\ - $options.args \\ + $args \\ $maf \\ $prefix.$format # last-dotplot has no --version option so let's use lastal from the same suite cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/lastal/functions.nf b/modules/last/lastal/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/lastal/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : ''
-                path_list.add(path)
-            }
-        }
-    }
-    if (ioptions.publish_files instanceof Map) {
-        for (ext in ioptions.publish_files) {
-            if (args.filename.endsWith(ext.key)) {
-                def ext_list = path_list.collect()
-                ext_list.add(ext.value)
-                return "${getPathFromList(ext_list)}/$args.filename"
-            }
-        }
-    } else if (ioptions.publish_files == null) {
-        return "${getPathFromList(path_list)}/$args.filename"
-    }
-}
diff --git a/modules/last/lastal/main.nf b/modules/last/lastal/main.nf
index c4335f25..4b90a965 100644
--- a/modules/last/lastal/main.nf
+++ b/modules/last/lastal/main.nf
@@ -1,22 +1,11 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'
-
-params.options = [:]
-options = initOptions(params.options)
-
 process LAST_LASTAL {
     tag "$meta.id"
     label 'process_high'
-    publishDir "${params.outdir}",
-        mode: params.publish_dir_mode,
-        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }
 
     conda (params.enable_conda ? 'bioconda::last=1250' : null)
-    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
-        container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0"
-    } else {
-        container "quay.io/biocontainers/last:1250--h2e03b76_0"
-    }
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' :
+        'quay.io/biocontainers/last:1250--h2e03b76_0' }"
 
     input:
     tuple val(meta), path(fastx), path (param_file)
@@ -27,13 +16,14 @@ process LAST_LASTAL {
     path "versions.yml" , emit: versions
 
     script:
-    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}"
     def trained_params = param_file ? "-p ${param_file}" : ''
     """
     INDEX_NAME=\$(basename \$(ls $index/*.des) .des)
     lastal \\
         $trained_params \\
-        $options.args \\
+        $args \\
         -P $task.cpus \\
         ${index}/\$INDEX_NAME \\
         $fastx \\
@@ -42,8 +32,8 @@
     # which makes its checksum non-reproducible.
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version 2>&1 | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version 2>&1 | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/lastdb/functions.nf b/modules/last/lastdb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/lastdb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/lastdb/main.nf b/modules/last/lastdb/main.nf index fb765ada..ff6485dc 100644 --- a/modules/last/lastdb/main.nf +++ b/modules/last/lastdb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_LASTDB { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(fastx) @@ -26,18 +15,19 @@ process LAST_LASTDB { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ mkdir lastdb lastdb \\ - $options.args \\ + $args \\ -P $task.cpus \\ lastdb/${prefix} \\ $fastx cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/mafconvert/functions.nf b/modules/last/mafconvert/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/mafconvert/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/mafconvert/main.nf b/modules/last/mafconvert/main.nf index 5e259109..f1a7312e 100644 --- a/modules/last/mafconvert/main.nf +++ b/modules/last/mafconvert/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_MAFCONVERT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -35,15 +24,16 @@ process LAST_MAFCONVERT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - maf-convert $options.args $format $maf | gzip --no-name \\ + maf-convert $args $format $maf | gzip --no-name \\ > ${prefix}.${format}.gz # maf-convert has no --version option but lastdb (part of the same package) has. 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/mafswap/functions.nf b/modules/last/mafswap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/mafswap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/mafswap/main.nf b/modules/last/mafswap/main.nf index 5ce38c92..c66e47d4 100644 --- a/modules/last/mafswap/main.nf +++ b/modules/last/mafswap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_MAFSWAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,14 +15,15 @@ process LAST_MAFSWAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - maf-swap $options.args $maf | gzip --no-name > ${prefix}.swapped.maf.gz + maf-swap $args $maf | gzip --no-name > ${prefix}.swapped.maf.gz # maf-swap has no --version option but lastdb, part of the same package, has. cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/postmask/functions.nf b/modules/last/postmask/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/postmask/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/postmask/main.nf b/modules/last/postmask/main.nf index 3102fbe6..e4f4390a 100644 --- a/modules/last/postmask/main.nf +++ b/modules/last/postmask/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_POSTMASK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,15 +15,16 @@ process LAST_POSTMASK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if( "$maf" == "${prefix}.maf.gz" ) error "Input and output names are the same, use the suffix option to disambiguate" """ - last-postmask $options.args $maf | gzip --no-name > ${prefix}.maf.gz + last-postmask $args $maf | gzip --no-name > ${prefix}.maf.gz # last-postmask does not have a --version option cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version 2>&1 | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version 2>&1 | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/split/functions.nf b/modules/last/split/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/split/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/split/main.nf b/modules/last/split/main.nf index 2a9e5621..ecc47e80 100644 --- a/modules/last/split/main.nf +++ b/modules/last/split/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_SPLIT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,13 +15,14 @@ process LAST_SPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - zcat < $maf | last-split $options.args | gzip --no-name > ${prefix}.maf.gz + zcat < $maf | last-split $args | gzip --no-name > ${prefix}.maf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(last-split --version 2>&1 | sed 's/last-split //') + "${task.process}": + last: \$(last-split --version 2>&1 | sed 's/last-split //') END_VERSIONS """ } diff --git a/modules/last/train/functions.nf b/modules/last/train/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/train/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/train/main.nf b/modules/last/train/main.nf index f0b958bc..0a949857 100644 --- a/modules/last/train/main.nf +++ b/modules/last/train/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_TRAIN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(fastx) @@ -27,20 +16,21 @@ process LAST_TRAIN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) last-train \\ - $options.args \\ + $args \\ -P $task.cpus \\ ${index}/\$INDEX_NAME \\ $fastx \\ > ${prefix}.\$INDEX_NAME.par cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/leehom/functions.nf b/modules/leehom/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/leehom/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/leehom/main.nf b/modules/leehom/main.nf index e0d9ee39..d997e68b 100644 --- a/modules/leehom/main.nf +++ b/modules/leehom/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION="1.2.15" +def VERSION = '1.2.15' // Version information not provided by tool on CLI process LEEHOM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::leehom=1.2.15" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/leehom:1.2.15--h29e30f7_1" - } else { - container "quay.io/biocontainers/leehom:1.2.15--h29e30f7_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/leehom:1.2.15--h29e30f7_1' : + 'quay.io/biocontainers/leehom:1.2.15--h29e30f7_1' }" input: tuple val(meta), path(reads) @@ -32,54 +21,54 @@ process LEEHOM { tuple val(meta), path("${prefix}_r2.fq.gz") , optional: true, emit: unmerged_r2_fq_pass tuple val(meta), path("${prefix}_r2.fail.fq.gz"), optional: true, emit: unmerged_r2_fq_fail tuple val(meta), path("*.log") , emit: log - path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" - if ( reads.toString().endsWith('.bam') ) { - """ - leeHom \\ - $options.args \\ - -t $task.cpus \\ - -o ${prefix}.bam \\ - --log ${prefix}.log \\ - $reads + if (reads.toString().endsWith('.bam')) { + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -o ${prefix}.bam \\ + --log ${prefix}.log \\ + $reads - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) - END_VERSIONS - """ - } else if ( meta.single_end ) { - """ - leeHom \\ - $options.args \\ - -t $task.cpus \\ - -fq1 $reads \\ - -fqo ${prefix} \\ - --log ${prefix}.log + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ + } else if (meta.single_end) { + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -fq1 $reads \\ + -fqo $prefix \\ + --log ${prefix}.log - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) - END_VERSIONS - """ + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ } else { - """ - leeHom \\ - $options.args \\ - -t $task.cpus \\ - -fq1 ${reads[0]} \\ - -fq2 ${reads[1]} \\ - -fqo ${prefix} \\ - --log ${prefix}.log + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -fq1 ${reads[0]} \\ + -fq2 ${reads[1]} \\ + -fqo $prefix \\ + --log ${prefix}.log - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) - END_VERSIONS - """ + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ } } diff --git a/modules/lib/functions.nf b/modules/lib/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lib/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lima/functions.nf b/modules/lima/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lima/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lima/main.nf b/modules/lima/main.nf index 16525953..64f6d87d 100644 --- a/modules/lima/main.nf +++ b/modules/lima/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LIMA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lima=2.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lima:2.2.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/lima:2.2.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lima:2.2.0--h9ee0642_0' : + 'quay.io/biocontainers/lima:2.2.0--h9ee0642_0' }" input: tuple val(meta), path(ccs) @@ -40,7 +29,8 @@ process LIMA { tuple val(meta), path("*.json") , optional: true, emit: json script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ OUT_EXT="" @@ -62,11 +52,11 @@ process LIMA { $primers \\ $prefix.\$OUT_EXT \\ -j $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) + "${task.process}": + lima: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) END_VERSIONS """ } diff --git a/modules/lissero/functions.nf b/modules/lissero/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lissero/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : ''
-                path_list.add(path)
-            }
-        }
-    }
-    if (ioptions.publish_files instanceof Map) {
-        for (ext in ioptions.publish_files) {
-            if (args.filename.endsWith(ext.key)) {
-                def ext_list = path_list.collect()
-                ext_list.add(ext.value)
-                return "${getPathFromList(ext_list)}/$args.filename"
-            }
-        }
-    } else if (ioptions.publish_files == null) {
-        return "${getPathFromList(path_list)}/$args.filename"
-    }
-}
diff --git a/modules/lissero/main.nf b/modules/lissero/main.nf
index ff863aaa..b5cd2b68 100644
--- a/modules/lissero/main.nf
+++ b/modules/lissero/main.nf
@@ -1,22 +1,11 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'
-
-params.options = [:]
-options = initOptions(params.options)
-
 process LISSERO {
     tag "$meta.id"
     label 'process_low'
-    publishDir "${params.outdir}",
-        mode: params.publish_dir_mode,
-        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }
 
     conda (params.enable_conda ? "bioconda::lissero=0.4.9" : null)
-    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
-        container "https://depot.galaxyproject.org/singularity/lissero:0.4.9--py_0"
-    } else {
-        container "quay.io/biocontainers/lissero:0.4.9--py_0"
-    }
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/lissero:0.4.9--py_0' :
+        'quay.io/biocontainers/lissero:0.4.9--py_0' }"
 
     input:
     tuple val(meta), path(fasta)
@@ -26,16 +15,17 @@ process LISSERO {
     path "versions.yml" , emit: versions
 
     script:
-    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.suffix ?
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ lissero \\ - $options.args \\ + $args \\ $fasta \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(lissero --version 2>&1) | sed 's/^.*LisSero //' ) + "${task.process}": + lissero: \$( echo \$(lissero --version 2>&1) | sed 's/^.*LisSero //' ) END_VERSIONS """ } diff --git a/modules/lofreq/call/functions.nf b/modules/lofreq/call/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/call/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/call/main.nf b/modules/lofreq/call/main.nf index e77d7a78..74995152 100644 --- a/modules/lofreq/call/main.nf +++ b/modules/lofreq/call/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_CALL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam) @@ -27,18 +16,19 @@ process LOFREQ_CALL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ lofreq \\ call \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.vcf.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/callparallel/functions.nf b/modules/lofreq/callparallel/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/callparallel/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index a86748d7..63ae2886 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_CALLPARALLEL { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam), path(bai) @@ -28,19 +17,20 @@ process LOFREQ_CALLPARALLEL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ lofreq \\ call-parallel \\ --pp-threads $task.cpus \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.vcf.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/filter/functions.nf b/modules/lofreq/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/filter/main.nf b/modules/lofreq/filter/main.nf index 905a961d..6f13ae44 100644 --- a/modules/lofreq/filter/main.nf +++ b/modules/lofreq/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_FILTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(vcf) @@ -26,17 +15,18 @@ process LOFREQ_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ lofreq \\ filter \\ - $options.args \\ + $args \\ -i $vcf \\ -o ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/indelqual/functions.nf b/modules/lofreq/indelqual/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/indelqual/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/indelqual/main.nf b/modules/lofreq/indelqual/main.nf index b33a1e04..bf04c5d2 100644 --- a/modules/lofreq/indelqual/main.nf +++ b/modules/lofreq/indelqual/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_INDELQUAL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam) @@ -26,17 +16,18 @@ process LOFREQ_INDELQUAL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ lofreq indelqual \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.bam \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/macs2/callpeak/functions.nf b/modules/macs2/callpeak/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/macs2/callpeak/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index 94f8945b..e8bfcda0 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MACS2_CALLPEAK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::macs2=2.2.7.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h4a8c8d9_3" - } else { - container "quay.io/biocontainers/macs2:2.2.7.1--py38h4a8c8d9_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h4a8c8d9_3' : + 'quay.io/biocontainers/macs2:2.2.7.1--py38h4a8c8d9_3' }" input: tuple val(meta), path(ipbam), path(controlbam) @@ -32,20 +21,21 @@ process MACS2_CALLPEAK { tuple val(meta), path("*.bdg") , optional:true, emit: bdg script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() - def format = meta.single_end ? 'BAM' : 'BAMPE' - def control = controlbam ? "--control $controlbam" : '' - if(args.contains('--format')){ - def id = args.findIndexOf{it=='--format'} - format = args[id+1] - args.remove(id+1) - args.remove(id) + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args_list = args.tokenize() + def format = meta.single_end ? 'BAM' : 'BAMPE' + def control = controlbam ? 
"--control $controlbam" : '' + if(args_list.contains('--format')){ + def id = args_list.findIndexOf{it=='--format'} + format = args_list[id+1] + args_list.remove(id+1) + args_list.remove(id) } """ macs2 \\ callpeak \\ - ${args.join(' ')} \\ + ${args_list.join(' ')} \\ --gsize $macs2_gsize \\ --format $format \\ --name $prefix \\ @@ -53,8 +43,8 @@ process MACS2_CALLPEAK { $control cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(macs2 --version | sed -e "s/macs2 //g") + "${task.process}": + macs2: \$(macs2 --version | sed -e "s/macs2 //g") END_VERSIONS """ } diff --git a/modules/malt/build/functions.nf b/modules/malt/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/malt/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/malt/build/main.nf b/modules/malt/build/main.nf index 48259a50..d1b0c427 100644 --- a/modules/malt/build/main.nf +++ b/modules/malt/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALT_BUILD { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::malt=0.53" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0" - } else { - container "quay.io/biocontainers/malt:0.53--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0' : + 'quay.io/biocontainers/malt:0.53--hdfd78af_0' }" input: path fastas @@ -30,6 +19,7 @@ process MALT_BUILD { path "malt-build.log", emit: log script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[MALT_BUILD] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' 
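(Context for the refactor shown in these hunks, not part of the patch itself: with `params.options` and the per-module `functions.nf` helpers removed, per-module command-line arguments, filename suffixes and publishing rules are expected to come from pipeline-level configuration, where they surface inside the module as `task.ext.*`. A minimal sketch of such a configuration follows; the `MACS2_CALLPEAK` selector matches the module refactored above, but the `--qvalue` value, the `.macs2` suffix and the output path are illustrative assumptions, not values defined by this patch series.)

// Hypothetical pipeline-side config (e.g. a conf/modules.config file); values are examples only.
process {
    withName: 'MACS2_CALLPEAK' {
        ext.args   = '--qvalue 0.01'   // read in the module as task.ext.args
        ext.suffix = '.macs2'          // read as task.ext.suffix and appended to meta.id for ${prefix}
        publishDir = [                 // publishing moves out of the module and into configuration
            path: { "${params.outdir}/macs2/callpeak" },
            mode: params.publish_dir_mode
        ]
    }
}

(The same mechanism covers container selection: in the refactored modules, `task.ext.singularity_pull_docker_container` takes over the role of the old `params.singularity_pull_docker_container` flag in the container ternary.)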
@@ -46,13 +36,13 @@ process MALT_BUILD { -s $seq_type \\ $igff \\ -d 'malt_index/' \\ - -t ${task.cpus} \\ - $options.args \\ + -t $task.cpus \\ + $args \\ -mdb ${map_db}/*.db |&tee malt-build.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2) + "${task.process}": + malt: \$(malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2) END_VERSIONS """ } diff --git a/modules/malt/run/functions.nf b/modules/malt/run/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/malt/run/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf index bc78de8c..8b8f05cc 100644 --- a/modules/malt/run/main.nf +++ b/modules/malt/run/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALT_RUN { label 'process_high_memory' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::malt=0.53" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0" - } else { - container "quay.io/biocontainers/malt:0.53--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0' : + 'quay.io/biocontainers/malt:0.53--hdfd78af_0' }" input: path fastqs @@ -30,6 +19,7 @@ process MALT_RUN { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[MALT_RUN] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' @@ -40,17 +30,17 @@ process MALT_RUN { """ malt-run \\ -J-Xmx${avail_mem}g \\ - -t ${task.cpus} \\ + -t $task.cpus \\ -v \\ -o . 
\\ - $options.args \\ + $args \\ --inFile ${fastqs.join(' ')} \\ -m $mode \\ --index $index/ |&tee malt-run.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(malt-run --help 2>&1 | grep -o 'version.* ' | cut -f 1 -d ',' | cut -f2 -d ' ') + "${task.process}": + malt: \$(malt-run --help 2>&1 | grep -o 'version.* ' | cut -f 1 -d ',' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/maltextract/functions.nf b/modules/maltextract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/maltextract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/maltextract/main.nf b/modules/maltextract/main.nf index d909ec96..e3a42016 100644 --- a/modules/maltextract/main.nf +++ b/modules/maltextract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALTEXTRACT { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::hops=0.35" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1" - } else { - container "quay.io/biocontainers/hops:0.35--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1' : + 'quay.io/biocontainers/hops:0.35--hdfd78af_1' }" input: path rma6 @@ -28,6 +17,7 @@ process MALTEXTRACT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ MaltExtract \\ -Xmx${task.memory.toGiga()}g \\ @@ -36,11 +26,11 @@ process MALTEXTRACT { -t $taxon_list \\ -r $ncbi_dir \\ -o results/ \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MaltExtract --help | head -n 2 | tail -n 1 | sed 's/MaltExtract version//') + "${task.process}": + maltextract: \$(MaltExtract --help | head -n 2 | tail -n 1 | sed 's/MaltExtract version//') END_VERSIONS """ } diff --git a/modules/manta/germline/functions.nf b/modules/manta/germline/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/germline/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index f957a7ec..553f0be9 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_GERMLINE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input), path(input_index) @@ -35,7 +24,8 @@ process MANTA_GERMLINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def options_manta = target_bed ? 
"--exome --callRegions $target_bed" : "" """ configManta.py \ @@ -59,10 +49,9 @@ process MANTA_GERMLINE { mv manta/results/variants/diploidSV.vcf.gz.tbi \ ${prefix}.diploid_sv.vcf.gz.tbi - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/manta/somatic/functions.nf b/modules/manta/somatic/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/somatic/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/somatic/main.nf b/modules/manta/somatic/main.nf index f912d478..38d73133 100644 --- a/modules/manta/somatic/main.nf +++ b/modules/manta/somatic/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_SOMATIC { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor) @@ -37,7 +26,8 @@ process MANTA_SOMATIC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def options_manta = target_bed ? 
"--exome --callRegions $target_bed" : "" """ @@ -60,8 +50,8 @@ process MANTA_SOMATIC { mv manta/results/variants/somaticSV.vcf.gz.tbi ${prefix}.somatic_sv.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/manta/tumoronly/functions.nf b/modules/manta/tumoronly/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/tumoronly/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/tumoronly/main.nf b/modules/manta/tumoronly/main.nf index f20e8128..dc72fcc4 100644 --- a/modules/manta/tumoronly/main.nf +++ b/modules/manta/tumoronly/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_TUMORONLY { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input), path(input_index) @@ -35,7 +24,8 @@ process MANTA_TUMORONLY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def options_manta = target_bed ? 
"--exome --callRegions $target_bed" : "" """ configManta.py \ @@ -59,10 +49,9 @@ process MANTA_TUMORONLY { mv manta/results/variants/tumorSV.vcf.gz.tbi \ ${prefix}.tumor_sv.vcf.gz.tbi - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/mapdamage2/functions.nf b/modules/mapdamage2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mapdamage2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mapdamage2/main.nf b/modules/mapdamage2/main.nf index e252e27c..3673970e 100644 --- a/modules/mapdamage2/main.nf +++ b/modules/mapdamage2/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MAPDAMAGE2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mapdamage2=2.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mapdamage2:2.2.1--pyr40_0" - } else { - container "quay.io/biocontainers/mapdamage2:2.2.1--pyr40_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mapdamage2:2.2.1--pyr40_0' : + 'quay.io/biocontainers/mapdamage2:2.2.1--pyr40_0' }" input: tuple val(meta), path(bam) @@ -43,16 +33,17 @@ process MAPDAMAGE2 { path "versions.yml",emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mapDamage \\ - $options.args \\ + $args \\ -i $bam \\ -r $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(mapDamage --version)) + "${task.process}": + mapdamage2: \$(echo \$(mapDamage --version)) END_VERSIONS """ } diff --git a/modules/mash/sketch/functions.nf b/modules/mash/sketch/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mash/sketch/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mash/sketch/main.nf b/modules/mash/sketch/main.nf index f434a5f1..0c0b6e17 100644 --- a/modules/mash/sketch/main.nf +++ b/modules/mash/sketch/main.nf @@ -1,20 +1,10 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MASH_SKETCH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mash=2.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mash:2.3--he348c14_1" - } else { - container "quay.io/biocontainers/mash:2.3--he348c14_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mash:2.3--he348c14_1' : + 'quay.io/biocontainers/mash:2.3--he348c14_1' }" input: tuple val(meta), path(reads) @@ -25,19 +15,20 @@ process MASH_SKETCH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mash \\ sketch \\ - $options.args \\ + $args \\ -p $task.cpus \\ -o ${prefix} \\ -r $reads \\ 2> ${prefix}.mash_stats cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(mash --version 2>&1) + "${task.process}": + mash: \$(mash --version 2>&1) END_VERSIONS """ } diff --git a/modules/mashtree/functions.nf b/modules/mashtree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mashtree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : ''
-                path_list.add(path)
-            }
-        }
-    }
-    if (ioptions.publish_files instanceof Map) {
-        for (ext in ioptions.publish_files) {
-            if (args.filename.endsWith(ext.key)) {
-                def ext_list = path_list.collect()
-                ext_list.add(ext.value)
-                return "${getPathFromList(ext_list)}/$args.filename"
-            }
-        }
-    } else if (ioptions.publish_files == null) {
-        return "${getPathFromList(path_list)}/$args.filename"
-    }
-}
diff --git a/modules/mashtree/main.nf b/modules/mashtree/main.nf
index db0b14f5..6728e3ce 100644
--- a/modules/mashtree/main.nf
+++ b/modules/mashtree/main.nf
@@ -1,22 +1,11 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'
-
-params.options = [:]
-options = initOptions(params.options)
-
 process MASHTREE {
     tag "$meta.id"
     label 'process_medium'
-    publishDir "${params.outdir}",
-        mode: params.publish_dir_mode,
-        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }
     conda (params.enable_conda ? "bioconda::mashtree=1.2.0" : null)
-    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
-        container "https://depot.galaxyproject.org/singularity/mashtree:1.2.0--pl526h516909a_0"
-    } else {
-        container "quay.io/biocontainers/mashtree:1.2.0--pl526h516909a_0"
-    }
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/mashtree:1.2.0--pl526h516909a_0' :
+        'quay.io/biocontainers/mashtree:1.2.0--pl526h516909a_0' }"
     input:
     tuple val(meta), path(seqs)
@@ -27,18 +16,19 @@ process MASHTREE {
     path "versions.yml" , emit: versions
     script:
-    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}"
     """
     mashtree \\
-        $options.args \\
+        $args \\
         --numcpus $task.cpus \\
         --outmatrix ${prefix}.tsv \\
         --outtree ${prefix}.dnd \\
         $seqs
     cat <<-END_VERSIONS > versions.yml
-    ${getProcessName(task.process)}:
-        ${getSoftwareName(task.process)}: \$( echo \$( mashtree --version 2>&1 ) | sed 's/^.*Mashtree //' )
+    "${task.process}":
+        mashtree: \$( echo \$( mashtree --version 2>&1 ) | sed 's/^.*Mashtree //' )
     END_VERSIONS
     """
 }
diff --git a/modules/maxbin2/functions.nf b/modules/maxbin2/functions.nf
deleted file mode 100644
index 85628ee0..00000000
--- a/modules/maxbin2/functions.nf
+++ /dev/null
@@ -1,78 +0,0 @@
-//
-// Utility functions used in nf-core DSL2 module files
-//
-
-//
-// Extract name of software tool from process name using $task.process
-//
-def getSoftwareName(task_process) {
-    return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
-}
-
-//
-// Extract name of module from process name using $task.process
-//
-def getProcessName(task_process) {
-    return task_process.tokenize(':')[-1]
-}
-
-//
-// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
-//
-def initOptions(Map args) {
-    def Map options = [:]
-    options.args = args.args ?: ''
-    options.args2 = args.args2 ?: ''
-    options.args3 = args.args3 ?: ''
-    options.publish_by_meta = args.publish_by_meta ?: []
-    options.publish_dir = args.publish_dir ?: ''
-    options.publish_files = args.publish_files
-    options.suffix = args.suffix ?: ''
-    return options
-}
-
-//
-// Tidy up and join elements of a list to return a path string
-//
-def getPathFromList(path_list) {
-    def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
-    paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
-    return paths.join('/')
-}
-
-//
-// Function to save/publish module results
-//
-def saveFiles(Map args) {
-    def ioptions = initOptions(args.options)
-    def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
-
-    // Do not publish versions.yml unless running from pytest workflow
-    if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
-        return null
-    }
-    if (ioptions.publish_by_meta) {
-        def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
-        for (key in key_list) {
-            if (args.meta && key instanceof String) {
-                def path = key
-                if (args.meta.containsKey(key)) {
-                    path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
-                }
-                path = path instanceof String ? path : ''
-                path_list.add(path)
-            }
-        }
-    }
-    if (ioptions.publish_files instanceof Map) {
-        for (ext in ioptions.publish_files) {
-            if (args.filename.endsWith(ext.key)) {
-                def ext_list = path_list.collect()
-                ext_list.add(ext.value)
-                return "${getPathFromList(ext_list)}/$args.filename"
-            }
-        }
-    } else if (ioptions.publish_files == null) {
-        return "${getPathFromList(path_list)}/$args.filename"
-    }
-}
diff --git a/modules/maxbin2/main.nf b/modules/maxbin2/main.nf
index bcfa9590..e13af704 100644
--- a/modules/maxbin2/main.nf
+++ b/modules/maxbin2/main.nf
@@ -1,22 +1,11 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'
-
-params.options = [:]
-options = initOptions(params.options)
-
 process MAXBIN2 {
     tag "$meta.id"
     label 'process_medium'
-    publishDir "${params.outdir}",
-        mode: params.publish_dir_mode,
-        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }
     conda (params.enable_conda ? "bioconda::maxbin2=2.2.7" : null)
-    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
-        container "https://depot.galaxyproject.org/singularity/maxbin2:2.2.7--he1b5a44_2"
-    } else {
-        container "quay.io/biocontainers/maxbin2:2.2.7--he1b5a44_2"
-    }
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/maxbin2:2.2.7--he1b5a44_2' :
+        'quay.io/biocontainers/maxbin2:2.2.7--he1b5a44_2' }"
     input:
     tuple val(meta), path(contigs), path(reads), path(abund)
@@ -33,20 +22,21 @@ process MAXBIN2 {
     path "versions.yml" , emit: versions
     script:
-    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}"
     def associate_files = reads ?
"-reads $reads" : "-abund $abund" """ run_MaxBin.pl \\ -contig $contigs \\ $associate_files \\ -thread $task.cpus \\ - $options.args \\ + $args \\ -out $prefix gzip *.fasta *.noclass *.tooshort *log *.marker cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": maxbin2: \$( run_MaxBin.pl -v | head -n 1 | sed 's/MaxBin //' ) END_VERSIONS """ diff --git a/modules/medaka/functions.nf b/modules/medaka/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/medaka/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/medaka/main.nf b/modules/medaka/main.nf index a0db4150..e7a8b9cc 100644 --- a/modules/medaka/main.nf +++ b/modules/medaka/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MEDAKA { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::medaka=1.4.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/medaka:1.4.4--py38h130def0_0" - } else { - container "quay.io/biocontainers/medaka:1.4.4--py38h130def0_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/medaka:1.4.4--py38h130def0_0' : + 'quay.io/biocontainers/medaka:1.4.4--py38h130def0_0' }" input: tuple val(meta), path(reads), path(assembly) @@ -26,11 +15,12 @@ process MEDAKA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ medaka_consensus \\ -t $task.cpus \\ - $options.args \\ + $args \\ -i $reads \\ -d $assembly \\ -o ./ @@ -40,8 +30,8 @@ process MEDAKA { gzip -n ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( medaka --version 2>&1 | sed 's/medaka //g' ) + "${task.process}": + medaka: \$( medaka --version 2>&1 | sed 's/medaka //g' ) END_VERSIONS """ } diff --git a/modules/megahit/functions.nf b/modules/megahit/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/megahit/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/megahit/main.nf b/modules/megahit/main.nf index 8c8a5555..011fa7d3 100644 --- a/modules/megahit/main.nf +++ b/modules/megahit/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MEGAHIT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::megahit=1.2.9 conda-forge::pigz=2.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0" - } else { - container "quay.io/biocontainers/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0' : + 'quay.io/biocontainers/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0' }" input: tuple val(meta), path(reads) @@ -30,25 +19,27 @@ process MEGAHIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ megahit \\ -r ${reads} \\ -t $task.cpus \\ - $options.args \\ + $args \\ --out-prefix $prefix pigz \\ --no-name \\ -p $task.cpus \\ - $options.args2 \\ + $args2 \\ megahit_out/*.fa \\ megahit_out/intermediate_contigs/*.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') + "${task.process}": + megahit: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') END_VERSIONS """ } else { @@ -57,19 +48,19 @@ process MEGAHIT { -1 ${reads[0]} \\ -2 ${reads[1]} \\ -t $task.cpus \\ - $options.args \\ + $args \\ --out-prefix $prefix pigz \\ --no-name \\ -p $task.cpus \\ - $options.args2 \\ + $args2 \\ megahit_out/*.fa \\ megahit_out/intermediate_contigs/*.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') + "${task.process}": + megahit: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') END_VERSIONS """ } diff --git a/modules/meningotype/functions.nf b/modules/meningotype/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/meningotype/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/meningotype/main.nf b/modules/meningotype/main.nf index 4e779e8c..5dde5633 100644 --- a/modules/meningotype/main.nf +++ b/modules/meningotype/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MENINGOTYPE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::meningotype=0.8.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/meningotype:0.8.5--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/meningotype:0.8.5--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/meningotype:0.8.5--pyhdfd78af_0' : + 'quay.io/biocontainers/meningotype:0.8.5--pyhdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -26,16 +15,17 @@ process MENINGOTYPE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ meningotype \\ - $options.args \\ + $args \\ $fasta \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(meningotype --version 2>&1) | sed 's/^.*meningotype v//' ) + "${task.process}": + meningotype: \$( echo \$(meningotype --version 2>&1) | sed 's/^.*meningotype v//' ) END_VERSIONS """ } diff --git a/modules/metabat2/jgisummarizebamcontigdepths/functions.nf b/modules/metabat2/jgisummarizebamcontigdepths/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/metabat2/jgisummarizebamcontigdepths/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/modules/metabat2/jgisummarizebamcontigdepths/main.nf index 1860ae16..e35d6715 100644 --- a/modules/metabat2/jgisummarizebamcontigdepths/main.nf +++ b/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::metabat2=2.15" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1" - } else { - container "quay.io/biocontainers/metabat2:2.15--h986a166_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1' : + 'quay.io/biocontainers/metabat2:2.15--h986a166_1' }" input: tuple val(meta), path(bam), path(bai) @@ -25,20 +15,21 @@ process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ export OMP_NUM_THREADS=$task.cpus jgi_summarize_bam_contig_depths \\ --outputDepth ${prefix}.txt \\ - $options.args \\ + $args \\ $bam bgzip --threads $task.cpus ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + "${task.process}": + metabat2: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) END_VERSIONS """ } diff --git a/modules/metabat2/metabat2/functions.nf b/modules/metabat2/metabat2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/metabat2/metabat2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf index 589e268c..d158af91 100644 --- a/modules/metabat2/metabat2/main.nf +++ b/modules/metabat2/metabat2/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METABAT2_METABAT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::metabat2=2.15" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1" - } else { - container "quay.io/biocontainers/metabat2:2.15--h986a166_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1' : + 'quay.io/biocontainers/metabat2:2.15--h986a166_1' }" input: tuple val(meta), path(fasta), path(depth) @@ -26,14 +16,15 @@ process METABAT2_METABAT2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def decompress_depth = depth ? "gzip -d -f $depth" : "" def depth_file = depth ? 
"-a ${depth.baseName}" : "" """ $decompress_depth metabat2 \\ - $options.args \\ + $args \\ -i $fasta \\ $depth_file \\ -t $task.cpus \\ @@ -46,8 +37,8 @@ process METABAT2_METABAT2 { bgzip --threads $task.cpus bins/*.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + "${task.process}": + metabat2: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) END_VERSIONS """ } diff --git a/modules/metaphlan3/functions.nf b/modules/metaphlan3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/metaphlan3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index c5157b66..9463da6f 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METAPHLAN3 { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::metaphlan=3.0.12' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/metaphlan:3.0.12--pyhb7b1952_0" - } else { - container "quay.io/biocontainers/metaphlan:3.0.12--pyhb7b1952_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/metaphlan:3.0.12--pyhb7b1952_0' : + 'quay.io/biocontainers/metaphlan:3.0.12--pyhb7b1952_0' }" input: tuple val(meta), path(input) @@ -29,7 +18,8 @@ process METAPHLAN3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input_type = ("$input".endsWith(".fastq.gz")) ? "--input_type fastq" : ("$input".contains(".fasta")) ? "--input_type fasta" : ("$input".endsWith(".bowtie2out.txt")) ? "--input_type bowtie2out" : "--input_type sam" def input_data = ("$input_type".contains("fastq")) && !meta.single_end ? "${input[0]},${input[1]}" : "$input" def bowtie2_out = "$input_type" == "--input_type bowtie2out" || "$input_type" == "--input_type sam" ? 
'' : "--bowtie2out ${prefix}.bowtie2out.txt" @@ -39,14 +29,14 @@ process METAPHLAN3 { --nproc $task.cpus \\ $input_type \\ $input_data \\ - $options.args \\ + $args \\ $bowtie2_out \\ --bowtie2db ${metaphlan_db} \\ --biom ${prefix}.biom \\ --output_file ${prefix}_profile.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(metaphlan --version 2>&1 | awk '{print \$3}') + "${task.process}": + metaphlan3: \$(metaphlan --version 2>&1 | awk '{print \$3}') END_VERSIONS """ } diff --git a/modules/methyldackel/extract/functions.nf b/modules/methyldackel/extract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/methyldackel/extract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/methyldackel/extract/main.nf b/modules/methyldackel/extract/main.nf index 94e4b379..a39c0305 100644 --- a/modules/methyldackel/extract/main.nf +++ b/modules/methyldackel/extract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METHYLDACKEL_EXTRACT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::methyldackel=0.6.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0" - } else { - container "quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0' : + 'quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,15 +17,16 @@ process METHYLDACKEL_EXTRACT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ MethylDackel extract \\ - $options.args \\ + $args \\ $fasta \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") + "${task.process}": + methyldackel: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") END_VERSIONS """ } diff --git a/modules/methyldackel/mbias/functions.nf b/modules/methyldackel/mbias/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/methyldackel/mbias/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/methyldackel/mbias/main.nf b/modules/methyldackel/mbias/main.nf index c8fd2fa2..1b4b14c4 100644 --- a/modules/methyldackel/mbias/main.nf +++ b/modules/methyldackel/mbias/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METHYLDACKEL_MBIAS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::methyldackel=0.6.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0" - } else { - container "quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0' : + 'quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,10 +17,11 @@ process METHYLDACKEL_MBIAS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ MethylDackel mbias \\ - $options.args \\ + $args \\ $fasta \\ $bam \\ $prefix \\ @@ -39,8 +29,8 @@ process METHYLDACKEL_MBIAS { > ${prefix}.mbias.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") + "${task.process}": + methyldackel: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") END_VERSIONS """ } diff --git a/modules/minia/functions.nf b/modules/minia/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minia/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minia/main.nf b/modules/minia/main.nf index 140ef9e7..8516ef6e 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIA { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::minia=3.2.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minia:3.2.4--he513fc3_0" - } else { - container "quay.io/biocontainers/minia:3.2.4--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/minia:3.2.4--he513fc3_0' : + 'quay.io/biocontainers/minia:3.2.4--he513fc3_0' }" input: tuple val(meta), path(reads) @@ -28,19 +17,20 @@ process MINIA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def read_list = reads.join(",") """ echo "${read_list}" | sed 's/,/\\n/g' > input_files.txt minia \\ - $options.args \\ + $args \\ -nb-cores $task.cpus \\ -in input_files.txt \\ -out $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(minia --version 2>&1 | grep Minia) | sed 's/^.*Minia version //;') + "${task.process}": + minia: \$(echo \$(minia --version 2>&1 | grep Minia) | sed 's/^.*Minia version //;') END_VERSIONS """ } diff --git a/modules/miniasm/functions.nf b/modules/miniasm/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/miniasm/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/miniasm/main.nf b/modules/miniasm/main.nf index d2652fab..35c2e2c0 100644 --- a/modules/miniasm/main.nf +++ b/modules/miniasm/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIASM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::miniasm=0.3_r179" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/miniasm:0.3_r179--h5bf99c6_2" - } else { - container "quay.io/biocontainers/miniasm:0.3_r179--h5bf99c6_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/miniasm:0.3_r179--h5bf99c6_2' : + 'quay.io/biocontainers/miniasm:0.3_r179--h5bf99c6_2' }" input: tuple val(meta), path(reads), path(paf) @@ -27,10 +16,11 @@ process MINIASM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ miniasm \\ - $options.args \\ + $args \\ -f $reads \\ $paf > \\ ${prefix}.gfa @@ -41,8 +31,8 @@ process MINIASM { gzip -n ${prefix}.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( miniasm -V 2>&1 ) + "${task.process}": + miniasm: \$( miniasm -V 2>&1 ) END_VERSIONS """ } diff --git a/modules/minimap2/align/functions.nf b/modules/minimap2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minimap2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index 215e4fb5..c6c0c316 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIMAP2_ALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::minimap2=2.21' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0" - } else { - container "quay.io/biocontainers/minimap2:2.21--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0' : + 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" input: tuple val(meta), path(reads) @@ -27,19 +16,20 @@ process MINIMAP2_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input_reads = meta.single_end ? 
"$reads" : "${reads[0]} ${reads[1]}" """ minimap2 \\ - $options.args \\ + $args \\ -t $task.cpus \\ $reference \\ $input_reads \\ > ${prefix}.paf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(minimap2 --version 2>&1) + "${task.process}": + minimap2: \$(minimap2 --version 2>&1) END_VERSIONS """ } diff --git a/modules/minimap2/index/functions.nf b/modules/minimap2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minimap2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minimap2/index/main.nf b/modules/minimap2/index/main.nf index b154a649..10cdd142 100644 --- a/modules/minimap2/index/main.nf +++ b/modules/minimap2/index/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIMAP2_INDEX { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:['']) } conda (params.enable_conda ? 'bioconda::minimap2=2.21' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0" - } else { - container "quay.io/biocontainers/minimap2:2.21--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0' : + 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" input: path fasta @@ -25,16 +14,17 @@ process MINIMAP2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ minimap2 \\ -t $task.cpus \\ -d ${fasta.baseName}.mmi \\ - $options.args \\ + $args \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(minimap2 --version 2>&1) + "${task.process}": + minimap2: \$(minimap2 --version 2>&1) END_VERSIONS """ } diff --git a/modules/mlst/functions.nf b/modules/mlst/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mlst/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mlst/main.nf b/modules/mlst/main.nf index faac9871..aa338420 100644 --- a/modules/mlst/main.nf +++ b/modules/mlst/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MLST { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mlst=2.19.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mlst:2.19.0--hdfd78af_1" - } else { - container "quay.io/biocontainers/mlst:2.19.0--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mlst:2.19.0--hdfd78af_1' : + 'quay.io/biocontainers/mlst:2.19.0--hdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -26,7 +15,8 @@ process MLST { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mlst \\ --threads $task.cpus \\ @@ -34,8 +24,8 @@ process MLST { > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(mlst --version 2>&1) | sed 's/mlst //' ) + "${task.process}": + mlst: \$( echo \$(mlst --version 2>&1) | sed 's/mlst //' ) END_VERSIONS """ diff --git a/modules/mosdepth/functions.nf b/modules/mosdepth/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mosdepth/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mosdepth/main.nf b/modules/mosdepth/main.nf index 8fe3cfee..b25e6a3d 100644 --- a/modules/mosdepth/main.nf +++ b/modules/mosdepth/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MOSDEPTH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::mosdepth=0.3.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mosdepth:0.3.2--h01d7912_0" - } else { - container "quay.io/biocontainers/mosdepth:0.3.2--h01d7912_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mosdepth:0.3.2--h01d7912_0' : + 'quay.io/biocontainers/mosdepth:0.3.2--h01d7912_0' }" input: tuple val(meta), path(bam), path(bai) @@ -34,17 +23,18 @@ process MOSDEPTH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def interval = window_size ? 
"--by ${window_size}" : "--by ${bed}" """ mosdepth \\ $interval \\ - $options.args \\ + $args \\ $prefix \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(mosdepth --version 2>&1 | sed 's/^.*mosdepth //; s/ .*\$//') + "${task.process}": + mosdepth: \$(mosdepth --version 2>&1 | sed 's/^.*mosdepth //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/msisensor/msi/functions.nf b/modules/msisensor/msi/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/msisensor/msi/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/msisensor/msi/main.nf b/modules/msisensor/msi/main.nf index bd5a0a0e..1eb510a1 100644 --- a/modules/msisensor/msi/main.nf +++ b/modules/msisensor/msi/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MSISENSOR_MSI { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::msisensor=0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2" - } else { - container "quay.io/biocontainers/msisensor:0.5--hb3646a4_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2' : + 'quay.io/biocontainers/msisensor:0.5--hb3646a4_2' }" input: tuple val(meta), path(normal_bam), path(normal_bai), path(tumor_bam), path(tumor_bai), val(metascan), path(homopolymers) @@ -29,7 +18,8 @@ process MSISENSOR_MSI { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ msisensor \\ msi \\ @@ -37,11 +27,11 @@ process MSISENSOR_MSI { -n $normal_bam \\ -t $tumor_bam \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') + "${task.process}": + msisensor: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') END_VERSIONS """ } diff --git a/modules/msisensor/scan/functions.nf b/modules/msisensor/scan/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/msisensor/scan/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/msisensor/scan/main.nf b/modules/msisensor/scan/main.nf index ebd8785a..2419a0a1 100644 --- a/modules/msisensor/scan/main.nf +++ b/modules/msisensor/scan/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MSISENSOR_SCAN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::msisensor=0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2" - } else { - container "quay.io/biocontainers/msisensor:0.5--hb3646a4_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2' : + 'quay.io/biocontainers/msisensor:0.5--hb3646a4_2' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process MSISENSOR_SCAN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ msisensor \\ scan \\ -d $fasta \\ -o ${prefix}.msisensor_scan.tab \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') + "${task.process}": + msisensor: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') END_VERSIONS """ } diff --git a/modules/mtnucratio/functions.nf b/modules/mtnucratio/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mtnucratio/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mtnucratio/main.nf b/modules/mtnucratio/main.nf index 28d08a13..b8663469 100644 --- a/modules/mtnucratio/main.nf +++ b/modules/mtnucratio/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MTNUCRATIO { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mtnucratio=0.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mtnucratio:0.7--hdfd78af_2" - } else { - container "quay.io/biocontainers/mtnucratio:0.7--hdfd78af_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mtnucratio:0.7--hdfd78af_2' : + 'quay.io/biocontainers/mtnucratio:0.7--hdfd78af_2' }" input: tuple val(meta), path(bam) @@ -27,17 +17,18 @@ process MTNUCRATIO { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mtnucratio \\ - $options.args \\ + $args \\ $bam \\ $mt_id cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(mtnucratio --version 2>&1) | head -n1 | sed 's/Version: //') + "${task.process}": + mtnucratio: \$(echo \$(mtnucratio --version 2>&1) | head -n1 | sed 's/Version: //') END_VERSIONS """ } diff --git a/modules/multiqc/functions.nf b/modules/multiqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/multiqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/multiqc/main.nf b/modules/multiqc/main.nf index 0861aa59..3dceb162 100644 --- a/modules/multiqc/main.nf +++ b/modules/multiqc/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MULTIQC { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::multiqc=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" input: path multiqc_files @@ -27,12 +16,13 @@ process MULTIQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - multiqc -f $options.args . + multiqc -f $args . 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) END_VERSIONS """ } diff --git a/modules/mummer/functions.nf b/modules/mummer/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mummer/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mummer/main.nf b/modules/mummer/main.nf index e46fd799..f4f3bb18 100644 --- a/modules/mummer/main.nf +++ b/modules/mummer/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '3.23' +def VERSION = '3.23' // Version information not provided by tool on CLI process MUMMER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mummer=3.23" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12" - } else { - container "quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12' : + 'quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12' }" input: tuple val(meta), path(ref), path(query) @@ -28,7 +17,8 @@ process MUMMER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def is_compressed_ref = ref.getName().endsWith(".gz") ? 
true : false def fasta_name_ref = ref.getName().replace(".gz", "") @@ -42,14 +32,14 @@ process MUMMER { gzip -c -d $query > $fasta_name_query fi mummer \\ - $options.args \\ + $args \\ $fasta_name_ref \\ $fasta_name_query \\ > ${prefix}.coords cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) + "${task.process}": + mummer: $VERSION END_VERSIONS """ } diff --git a/modules/muscle/functions.nf b/modules/muscle/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/muscle/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/muscle/main.nf b/modules/muscle/main.nf index 6ffb97ac..a50f5cb3 100644 --- a/modules/muscle/main.nf +++ b/modules/muscle/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MUSCLE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::muscle=3.8.1551" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/muscle:3.8.1551--h7d875b9_6" - } else { - container "quay.io/biocontainers/muscle:3.8.1551--h7d875b9_6" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/muscle:3.8.1551--h7d875b9_6' : + 'quay.io/biocontainers/muscle:3.8.1551--h7d875b9_6' }" input: tuple val(meta), path(fasta) @@ -33,18 +22,18 @@ process MUSCLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def fasta_out = options.args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' - def clw_out = options.args.contains('-clw') ? "-clwout ${prefix}_muscle_msa.clw" : '' - def msf_out = options.args.contains('-msf') ? "-msfout ${prefix}_muscle_msa.msf" : '' - def phys_out = options.args.contains('-phys') ? "-physout ${prefix}_muscle_msa.phys" : '' - def phyi_out = options.args.contains('-phyi') ? "-phyiout ${prefix}_muscle_msa.phyi" : '' - def html_out = options.args.contains('-html') ? "-htmlout ${prefix}_muscle_msa.html" : '' - def tree_out = options.args.contains('-maketree') ? "-out ${prefix}_muscle_msa.tree" : '' - + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def fasta_out = args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' + def clw_out = args.contains('-clw') ? "-clwout ${prefix}_muscle_msa.clw" : '' + def msf_out = args.contains('-msf') ? "-msfout ${prefix}_muscle_msa.msf" : '' + def phys_out = args.contains('-phys') ? "-physout ${prefix}_muscle_msa.phys" : '' + def phyi_out = args.contains('-phyi') ? "-phyiout ${prefix}_muscle_msa.phyi" : '' + def html_out = args.contains('-html') ? "-htmlout ${prefix}_muscle_msa.html" : '' + def tree_out = args.contains('-maketree') ? 
"-out ${prefix}_muscle_msa.tree" : '' """ muscle \\ - $options.args \\ + $args \\ -in $fasta \\ $fasta_out \\ $clw_out \\ @@ -55,8 +44,8 @@ process MUSCLE { $tree_out \\ -loga muscle_msa.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(muscle -version | sed 's/^MUSCLE v//; s/by.*\$//') + "${task.process}": + muscle: \$(muscle -version | sed 's/^MUSCLE v//; s/by.*\$//') END_VERSIONS """ } diff --git a/modules/nanolyse/functions.nf b/modules/nanolyse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nanolyse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nanolyse/main.nf b/modules/nanolyse/main.nf index 271592f7..f29eeb77 100644 --- a/modules/nanolyse/main.nf +++ b/modules/nanolyse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NANOLYSE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::nanolyse=1.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nanolyse:1.2.0--py_0" - } else { - container "quay.io/biocontainers/nanolyse:1.2.0--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nanolyse:1.2.0--py_0' : + 'quay.io/biocontainers/nanolyse:1.2.0--py_0' }" input: tuple val(meta), path(fastq) @@ -28,14 +17,15 @@ process NANOLYSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gunzip -c $fastq | NanoLyse -r $fasta | gzip > ${prefix}.fastq.gz mv NanoLyse.log ${prefix}.nanolyse.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(NanoLyse --version 2>&1 | sed -e "s/NanoLyse //g") + "${task.process}": + nanolyse: \$(NanoLyse --version 2>&1 | sed -e "s/NanoLyse //g") END_VERSIONS """ } diff --git a/modules/nanoplot/functions.nf b/modules/nanoplot/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nanoplot/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nanoplot/main.nf b/modules/nanoplot/main.nf index 16e2248c..36577d8a 100644 --- a/modules/nanoplot/main.nf +++ b/modules/nanoplot/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NANOPLOT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::nanoplot=1.38.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nanoplot:1.38.0--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/nanoplot:1.38.0--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nanoplot:1.38.0--pyhdfd78af_0' : + 'quay.io/biocontainers/nanoplot:1.38.0--pyhdfd78af_0' }" input: tuple val(meta), path(ontfile) @@ -29,16 +18,17 @@ process NANOPLOT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def input_file = ("$ontfile".endsWith(".fastq.gz")) ? "--fastq ${ontfile}" : ("$ontfile".endsWith(".txt")) ? 
"--summary ${ontfile}" : '' """ NanoPlot \\ - $options.args \\ + $args \\ -t $task.cpus \\ $input_file cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(NanoPlot --version 2>&1) | sed 's/^.*NanoPlot //; s/ .*\$//') + "${task.process}": + nanoplot: \$(echo \$(NanoPlot --version 2>&1) | sed 's/^.*NanoPlot //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ncbigenomedownload/functions.nf b/modules/ncbigenomedownload/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ncbigenomedownload/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ncbigenomedownload/main.nf b/modules/ncbigenomedownload/main.nf index ffa53871..466c8d09 100644 --- a/modules/ncbigenomedownload/main.nf +++ b/modules/ncbigenomedownload/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NCBIGENOMEDOWNLOAD { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ncbi-genome-download=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ncbi-genome-download:0.3.0--pyh864c0ab_1" - } else { - container "quay.io/biocontainers/ncbi-genome-download:0.3.0--pyh864c0ab_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ncbi-genome-download:0.3.0--pyh864c0ab_1' : + 'quay.io/biocontainers/ncbi-genome-download:0.3.0--pyh864c0ab_1' }" input: val meta @@ -39,18 +28,19 @@ process NCBIGENOMEDOWNLOAD { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def accessions_opt = accessions ? 
"-A ${accessions}" : "" """ ncbi-genome-download \\ - $options.args \\ + $args \\ $accessions_opt \\ --output-folder ./ \\ --flat-output cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( ncbi-genome-download --version ) + "${task.process}": + ncbigenomedownload: \$( ncbi-genome-download --version ) END_VERSIONS """ } diff --git a/modules/nextclade/functions.nf b/modules/nextclade/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/nextclade/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nextclade/main.nf b/modules/nextclade/main.nf index 6fc6efc4..317d393d 100755 --- a/modules/nextclade/main.nf +++ b/modules/nextclade/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NEXTCLADE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::nextclade_js=0.14.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nextclade_js:0.14.4--h9ee0642_0" - } else { - container "quay.io/biocontainers/nextclade_js:0.14.4--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nextclade_js:0.14.4--h9ee0642_0' : + 'quay.io/biocontainers/nextclade_js:0.14.4--h9ee0642_0' }" input: tuple val(meta), path(fasta) @@ -30,10 +19,11 @@ process NEXTCLADE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ nextclade \\ - $options.args \\ + $args \\ --jobs $task.cpus \\ --input-fasta $fasta \\ --output-json ${prefix}.json \\ @@ -43,8 +33,8 @@ process NEXTCLADE { --output-tree ${prefix}.tree.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(nextclade --version 2>&1) + "${task.process}": + nextclade: \$(nextclade --version 2>&1) END_VERSIONS """ } diff --git a/modules/ngmaster/functions.nf b/modules/ngmaster/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ngmaster/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ngmaster/main.nf b/modules/ngmaster/main.nf index 1897b5f3..0884b55c 100644 --- a/modules/ngmaster/main.nf +++ b/modules/ngmaster/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NGMASTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ngmaster=0.5.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ngmaster:0.5.8--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/ngmaster:0.5.8--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ngmaster:0.5.8--pyhdfd78af_1' : + 'quay.io/biocontainers/ngmaster:0.5.8--pyhdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -26,16 +15,17 @@ process NGMASTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ngmaster \\ - $options.args \\ + $args \\ $fasta \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(ngmaster --version 2>&1) | sed 's/^.*ngmaster //' ) + "${task.process}": + ngmaster: \$( echo \$(ngmaster --version 2>&1) | sed 's/^.*ngmaster //' ) END_VERSIONS """ } diff --git a/modules/nucmer/functions.nf b/modules/nucmer/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nucmer/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nucmer/main.nf b/modules/nucmer/main.nf index 49a275f4..bb5dcb7d 100644 --- a/modules/nucmer/main.nf +++ b/modules/nucmer/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NUCMER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mummer=3.23" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12" - } else { - container "quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12' : + 'quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12' }" input: tuple val(meta), path(ref), path(query) @@ -27,7 +16,8 @@ process NUCMER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false def is_compressed_query = query.getName().endsWith(".gz") ? 
true : false def fasta_name_ref = ref.getName().replace(".gz", "") @@ -43,13 +33,13 @@ process NUCMER { nucmer \\ -p $prefix \\ --coords \\ - $options.args \\ + $args \\ $fasta_name_ref \\ $fasta_name_query cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( nucmer --version 2>&1 | grep "version" | sed -e "s/NUCmer (NUCleotide MUMmer) version //g; s/nucmer//g;" ) + "${task.process}": + nucmer: \$( nucmer --version 2>&1 | grep "version" | sed -e "s/NUCmer (NUCleotide MUMmer) version //g; s/nucmer//g;" ) END_VERSIONS """ } diff --git a/modules/optitype/functions.nf b/modules/optitype/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/optitype/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/optitype/main.nf b/modules/optitype/main.nf index 083b03a7..24be66a7 100644 --- a/modules/optitype/main.nf +++ b/modules/optitype/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process OPTITYPE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::optitype=1.3.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/optitype:1.3.5--0" - } else { - container "quay.io/biocontainers/optitype:1.3.5--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/optitype:1.3.5--0' : + 'quay.io/biocontainers/optitype:1.3.5--0' }" input: tuple val(meta), path(bam) @@ -26,30 +15,32 @@ process OPTITYPE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - # Create a config for OptiType on a per sample basis with options.args2 + # Create a config for OptiType on a per sample basis with task.ext.args2 #Doing it old school now echo "[mapping]" > config.ini echo "razers3=razers3" >> config.ini echo "threads=$task.cpus" >> config.ini echo "[ilp]" >> config.ini - echo "$options.args2" >> config.ini + echo "$args2" >> config.ini echo "threads=1" >> config.ini echo "[behavior]" >> config.ini echo "deletebam=true" >> config.ini echo "unpaired_weight=0" >> config.ini echo "use_discordant=false" >> config.ini - # Run the actual OptiType typing with options.args - OptiTypePipeline.py -i ${bam} -c config.ini --${meta.seq_type} $options.args --prefix $prefix --outdir $prefix + # Run the actual OptiType typing with args + OptiTypePipeline.py -i ${bam} -c config.ini --${meta.seq_type} $args --prefix $prefix --outdir $prefix #Couldn't find a nicer way of doing this cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cat \$(which OptiTypePipeline.py) | grep -e "Version:" | sed -e "s/Version: //g") + "${task.process}": + optitype: \$(cat \$(which OptiTypePipeline.py) | grep -e "Version:" | sed -e "s/Version: //g") END_VERSIONS """ } diff --git a/modules/pairix/functions.nf b/modules/pairix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairix/main.nf b/modules/pairix/main.nf index 4bfd3b0d..c1b9658c 100644 --- a/modules/pairix/main.nf +++ b/modules/pairix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairix=0.3.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairix:0.3.7--py36h30a8e3e_3" - } else { - container "quay.io/biocontainers/pairix:0.3.7--py36h30a8e3e_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairix:0.3.7--py36h30a8e3e_3' : + 'quay.io/biocontainers/pairix:0.3.7--py36h30a8e3e_3' }" input: tuple val(meta), path(pair) @@ -26,14 +15,15 @@ process PAIRIX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ pairix \\ - $options.args \\ + $args \\ $pair cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pairix --help 2>&1) | sed 's/^.*Version: //; s/Usage.*\$//') + "${task.process}": + pairix: \$(echo \$(pairix --help 2>&1) | sed 's/^.*Version: //; s/Usage.*\$//') END_VERSIONS """ } diff --git a/modules/pairtools/dedup/functions.nf b/modules/pairtools/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') 
-} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/dedup/main.nf b/modules/pairtools/dedup/main.nf index eabf24dd..5ee9dc43 100644 --- a/modules/pairtools/dedup/main.nf +++ b/modules/pairtools/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_DEDUP { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -27,17 +16,18 @@ process PAIRTOOLS_DEDUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools dedup \\ - $options.args \\ + $args \\ -o ${prefix}.pairs.gz \\ --output-stats ${prefix}.pairs.stat \\ $input cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/flip/functions.nf b/modules/pairtools/flip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/flip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/flip/main.nf b/modules/pairtools/flip/main.nf index 50cfdfd2..452800cc 100644 --- a/modules/pairtools/flip/main.nf +++ b/modules/pairtools/flip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_FLIP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(sam) @@ -27,18 +16,19 @@ process PAIRTOOLS_FLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools \\ flip \\ -c $chromsizes \\ - $options.args \\ + $args \\ -o ${prefix}.flip.gz \\ $sam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/parse/functions.nf b/modules/pairtools/parse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/parse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/parse/main.nf b/modules/pairtools/parse/main.nf index cd6099e1..1d34d42c 100644 --- a/modules/pairtools/parse/main.nf +++ b/modules/pairtools/parse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_PARSE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(bam) @@ -28,19 +17,20 @@ process PAIRTOOLS_PARSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools \\ parse \\ -c $chromsizes \\ - $options.args \\ + $args \\ --output-stats ${prefix}.pairsam.stat \\ -o ${prefix}.pairsam.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/restrict/functions.nf b/modules/pairtools/restrict/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/restrict/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/restrict/main.nf b/modules/pairtools/restrict/main.nf index b1b21da7..9fcc245c 100644 --- a/modules/pairtools/restrict/main.nf +++ b/modules/pairtools/restrict/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_RESTRICT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(pairs) @@ -27,18 +16,19 @@ process PAIRTOOLS_RESTRICT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools \\ restrict \\ -f $frag \\ - $options.args \\ + $args \\ -o ${prefix}.pairs.gz \\ $pairs cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/select/functions.nf b/modules/pairtools/select/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/select/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/select/main.nf b/modules/pairtools/select/main.nf index dec29573..f699afa3 100644 --- a/modules/pairtools/select/main.nf +++ b/modules/pairtools/select/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_SELECT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -27,17 +16,18 @@ process PAIRTOOLS_SELECT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools select \\ - "$options.args" \\ + "$args" \\ -o ${prefix}.selected.pairs.gz \\ --output-rest ${prefix}.unselected.pairs.gz \\ ${input} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/sort/functions.nf b/modules/pairtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/sort/main.nf b/modules/pairtools/sort/main.nf index 996bcb0b..5caa5b74 100644 --- a/modules/pairtools/sort/main.nf +++ b/modules/pairtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_SORT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -26,20 +15,21 @@ process PAIRTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def mem = task.memory.toString().replaceAll(/(\s|\.|B)+/, '') """ pairtools \\ sort \\ - $options.args \\ + $args \\ --nproc $task.cpus \\ --memory "$mem" \\ -o ${prefix}.pairs.gz \\ $input cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pangolin/functions.nf b/modules/pangolin/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pangolin/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index edf67dd7..99a68e09 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PANGOLIN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::pangolin=3.1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/pangolin:3.1.11--pyhdfd78af_1' - } else { - container 'quay.io/biocontainers/pangolin:3.1.11--pyhdfd78af_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pangolin:3.1.11--pyhdfd78af_1' : + 'quay.io/biocontainers/pangolin:3.1.11--pyhdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process PANGOLIN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pangolin \\ $fasta\\ --outfile ${prefix}.pangolin.csv \\ --threads $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pangolin --version | sed "s/pangolin //g") + "${task.process}": + pangolin: \$(pangolin --version | sed "s/pangolin //g") END_VERSIONS """ } diff --git a/modules/paraclu/functions.nf b/modules/paraclu/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/paraclu/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/paraclu/main.nf b/modules/paraclu/main.nf index 6d65a784..a2003834 100644 --- a/modules/paraclu/main.nf +++ b/modules/paraclu/main.nf @@ -1,22 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) +def VERSION = '10' // Version information not provided by tool on CLI process PARACLU { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::paraclu=10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/paraclu%3A10--h9a82719_1" - } else { - container "quay.io/biocontainers/paraclu:10--h9a82719_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/paraclu%3A10--h9a82719_1' : + 'quay.io/biocontainers/paraclu:10--h9a82719_1' }" input: tuple val(meta), path(bed) @@ -27,8 +18,8 @@ process PARACLU { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def VERSION=10 + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ awk -F "\t" '{print\$1"\t"\$6"\t"\$2"\t"\$5}' < $bed > ${bed}_4P @@ -38,8 +29,8 @@ process PARACLU { awk -F '\t' '{print \$1"\t"\$3"\t"\$4"\t"\$1":"\$3".."\$4","\$2"\t"\$6"\t"\$2}' ${prefix}.clustered.simplified > ${prefix}.clustered.simplified.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: $VERSION + "${task.process}": + paraclu: $VERSION END_VERSIONS """ } diff --git a/modules/pbbam/pbmerge/functions.nf b/modules/pbbam/pbmerge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pbbam/pbmerge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pbbam/pbmerge/main.nf b/modules/pbbam/pbmerge/main.nf index 63cd2ffe..970128cb 100644 --- a/modules/pbbam/pbmerge/main.nf +++ b/modules/pbbam/pbmerge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PBBAM_PBMERGE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pbbam=1.7.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pbbam:1.7.0--h058f120_1" - } else { - container "quay.io/biocontainers/pbbam:1.7.0--h058f120_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pbbam:1.7.0--h058f120_1' : + 'quay.io/biocontainers/pbbam:1.7.0--h058f120_1' }" input: tuple val(meta), path(bam) @@ -27,16 +16,17 @@ process PBBAM_PBMERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pbmerge \\ -o ${prefix}.bam \\ - $options.args \\ + $args \\ *.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - pbbam/pbmerge: \$( pbmerge --version|sed 's/pbmerge //' ) + "${task.process}": + pbbam: \$( pbmerge --version|sed 's/pbmerge //' ) END_VERSIONS """ } diff --git a/modules/pbccs/functions.nf b/modules/pbccs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pbccs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 55eacd76..83e56d96 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PBCCS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pbccs=6.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pbccs:6.2.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/pbccs:6.2.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pbccs:6.2.0--h9ee0642_0' : + 'quay.io/biocontainers/pbccs:6.2.0--h9ee0642_0' }" input: tuple val(meta), path(bam), path(pbi) @@ -32,7 +21,8 @@ process PBCCS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ccs \\ $bam \\ @@ -42,11 +32,11 @@ process PBCCS { --metrics-json ${prefix}.chunk${chunk_num}.metrics.json.gz \\ --chunk $chunk_num/$chunk_on \\ -j $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ccs --version 2>&1) | grep 'ccs' | sed 's/^.*ccs //; s/ .*\$//') + "${task.process}": + pbccs: \$(echo \$(ccs --version 2>&1) | grep 'ccs' | sed 's/^.*ccs //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/peddy/functions.nf b/modules/peddy/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/peddy/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/peddy/main.nf b/modules/peddy/main.nf index 4331ed9d..0a6c3384 100644 --- a/modules/peddy/main.nf +++ b/modules/peddy/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PEDDY { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::peddy=0.4.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/peddy:0.4.8--pyh5e36f6f_0" - } else { - container "quay.io/biocontainers/peddy:0.4.8--pyh5e36f6f_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/peddy:0.4.8--pyh5e36f6f_0' : + 'quay.io/biocontainers/peddy:0.4.8--pyh5e36f6f_0' }" input: tuple val(meta), path(vcf), path(vcf_tbi) @@ -30,18 +19,19 @@ process PEDDY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ peddy \\ - $options.args \\ + $args \\ --plot \\ -p $task.cpus \\ $vcf \\ $ped cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( peddy --version 2>&1 | sed 's/peddy, version //' ) + "${task.process}": + peddy: \$( peddy --version 2>&1 | sed 's/peddy, version //' ) END_VERSIONS """ } diff --git a/modules/phantompeakqualtools/functions.nf b/modules/phantompeakqualtools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/phantompeakqualtools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index b390bf7e..f2edabc3 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.2.2' +def VERSION = '1.2.2' // Version information not provided by tool on CLI process PHANTOMPEAKQUALTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::phantompeakqualtools=1.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/phantompeakqualtools:1.2.2--0" - } else { - container "quay.io/biocontainers/phantompeakqualtools:1.2.2--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/phantompeakqualtools:1.2.2--0' : + 'quay.io/biocontainers/phantompeakqualtools:1.2.2--0' }" input: tuple val(meta), path(bam) @@ -30,13 +19,15 @@ process PHANTOMPEAKQUALTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ RUN_SPP=`which run_spp.R` Rscript -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + phantompeakqualtools: $VERSION END_VERSIONS """ } diff --git a/modules/phyloflash/functions.nf b/modules/phyloflash/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/phyloflash/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/phyloflash/main.nf b/modules/phyloflash/main.nf index 894c16a2..c507dd14 100644 --- a/modules/phyloflash/main.nf +++ b/modules/phyloflash/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PHYLOFLASH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::phyloflash=3.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/phyloflash:3.4--hdfd78af_1" - } else { - container "quay.io/biocontainers/phyloflash:3.4--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/phyloflash:3.4--hdfd78af_1' : + 'quay.io/biocontainers/phyloflash:3.4--hdfd78af_1' }" input: tuple val(meta), path(reads) @@ -28,12 +17,12 @@ process PHYLOFLASH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ phyloFlash.pl \\ - $options.args \\ + $args \\ -read1 ${reads[0]} \\ -lib $prefix \\ -interleaved \\ @@ -44,14 +33,14 @@ process PHYLOFLASH { mv ${prefix}.* $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") END_VERSIONS """ } else { """ phyloFlash.pl \\ - $options.args \\ + $args \\ -read1 ${reads[0]} \\ -read2 ${reads[1]} \\ -lib $prefix \\ @@ -62,24 +51,22 @@ process PHYLOFLASH { mv ${prefix}.* $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") END_VERSIONS """ } stub: - - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mkdir ${prefix} touch ${prefix}/${prefix}.SSU.collection.fasta touch ${prefix}/${prefix}.phyloFlash cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") END_VERSIONS """ } diff --git a/modules/picard/collecthsmetrics/functions.nf b/modules/picard/collecthsmetrics/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/collecthsmetrics/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index 1f7ad8e6..adb82d8c 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_COLLECTHSMETRICS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::picard=2.26.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.26.2--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.26.2--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.26.2--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.2--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -30,7 +19,8 @@ process PICARD_COLLECTHSMETRICS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta ? 
"-R $fasta" : "" def avail_mem = 3 @@ -43,7 +33,7 @@ process PICARD_COLLECTHSMETRICS { picard \\ -Xmx${avail_mem}g \\ CollectHsMetrics \\ - $options.args \\ + $args \\ $reference \\ -BAIT_INTERVALS $bait_intervals \\ -TARGET_INTERVALS $target_intervals \\ @@ -51,8 +41,8 @@ process PICARD_COLLECTHSMETRICS { -OUTPUT ${prefix}_collecthsmetrics.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/collectmultiplemetrics/functions.nf b/modules/picard/collectmultiplemetrics/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/collectmultiplemetrics/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index dd8fdaca..f52f5885 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_COLLECTMULTIPLEMETRICS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process PICARD_COLLECTMULTIPLEMETRICS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectMultipleMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
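The substitution above recurs throughout this series: `$options.args` becomes `$args` read from `task.ext.args`, `options.suffix` becomes `task.ext.suffix`, and the per-module `publishDir`/`saveFiles` call is dropped, so both tool arguments and output naming are expected to come from pipeline configuration instead of `params.options`. A minimal sketch of such configuration, assuming a `conf/modules.config`-style file; the selector matches the process above, while the argument values are placeholders rather than anything defined in this patch:

// Hypothetical pipeline-side configuration; values shown are illustrative only.
process {
    withName: 'PICARD_COLLECTMULTIPLEMETRICS' {
        ext.args   = 'VALIDATION_STRINGENCY=LENIENT'   // surfaces in the module as task.ext.args -> $args
        ext.suffix = '.metrics'                        // surfaces as task.ext.suffix, appended to meta.id for $prefix
    }
}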
@@ -39,14 +29,14 @@ process PICARD_COLLECTMULTIPLEMETRICS { picard \\ -Xmx${avail_mem}g \\ CollectMultipleMetrics \\ - $options.args \\ + $args \\ INPUT=$bam \\ OUTPUT=${prefix}.CollectMultipleMetrics \\ REFERENCE_SEQUENCE=$fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard CollectMultipleMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard CollectMultipleMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/collectwgsmetrics/functions.nf b/modules/picard/collectwgsmetrics/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/collectwgsmetrics/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index 6028feef..94745d2d 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_COLLECTWGSMETRICS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) @@ -27,7 +16,8 @@ process PICARD_COLLECTWGSMETRICS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectWgsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
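Each Picard module in this series opens the same memory guard, but the hunks only show its first half; the closing branch falls outside the diff context. For orientation, the full idiom usually reads as below (a sketch, assuming the standard `task.memory.giga` accessor; not part of this patch):

// Sketch of the complete avail_mem guard used by the Picard modules.
def avail_mem = 3
if (!task.memory) {
    log.info '[Picard CollectWgsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
} else {
    avail_mem = task.memory.giga   // e.g. task.memory = 8.GB  ->  avail_mem = 8  ->  picard -Xmx8g
}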
@@ -38,14 +28,14 @@ process PICARD_COLLECTWGSMETRICS { picard \\ -Xmx${avail_mem}g \\ CollectWgsMetrics \\ - $options.args \\ + $args \\ INPUT=$bam \\ OUTPUT=${prefix}.CollectWgsMetrics.coverage_metrics \\ REFERENCE_SEQUENCE=$fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard CollectWgsMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard CollectWgsMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/filtersamreads/functions.nf b/modules/picard/filtersamreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/filtersamreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index 68cee34d..8b1d2e6b 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_FILTERSAMREADS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam), path(readlist) @@ -27,7 +16,8 @@ process PICARD_FILTERSAMREADS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard FilterSamReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
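The FilterSamReads script below branches on a `$filter` value, so the module presumably also declares a filter-mode input alongside the bam/read-list tuple shown above; that declaration sits outside the visible hunks. A hypothetical call from a workflow, with the include path and channel contents invented for illustration:

// Hypothetical usage sketch; the second 'filter' value input is inferred from the script's $filter branches.
include { PICARD_FILTERSAMREADS } from './modules/picard/filtersamreads/main'

workflow {
    ch_bam_readlist = Channel.of( [ [ id:'sample1' ], file('sample1.bam'), file('keep_reads.txt') ] )
    PICARD_FILTERSAMREADS ( ch_bam_readlist, 'includeReadList' )   // other modes: includeAligned, excludeAligned, excludeReadList
}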
@@ -42,11 +32,11 @@ process PICARD_FILTERSAMREADS { --INPUT $bam \\ --OUTPUT ${prefix}.bam \\ --FILTER $filter \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } else if ( filter == 'includeReadList' || filter == 'excludeReadList' ) { @@ -58,11 +48,11 @@ process PICARD_FILTERSAMREADS { --OUTPUT ${prefix}.bam \\ --FILTER $filter \\ --READ_LIST_FILE $readlist \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/markduplicates/functions.nf b/modules/picard/markduplicates/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/markduplicates/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index 130a1e52..d4c5886f 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_MARKDUPLICATES { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process PICARD_MARKDUPLICATES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
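Because every module in this series also loses its `publishDir`/`saveFiles` block, result publishing likewise moves to pipeline configuration. A sketch of what that could look like for the MarkDuplicates process below, with the path, mode and pattern values chosen purely for illustration:

// Hypothetical publishing configuration replacing the removed publishDir/saveFiles logic.
process {
    withName: 'PICARD_MARKDUPLICATES' {
        publishDir = [
            path: { "${params.outdir}/picard/markduplicates" },
            mode: 'copy',
            pattern: '*.{bam,metrics.txt}'
        ]
    }
}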
@@ -39,14 +29,14 @@ process PICARD_MARKDUPLICATES { picard \\ -Xmx${avail_mem}g \\ MarkDuplicates \\ - $options.args \\ + $args \\ I=$bam \\ O=${prefix}.bam \\ M=${prefix}.MarkDuplicates.metrics.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/mergesamfiles/functions.nf b/modules/picard/mergesamfiles/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/mergesamfiles/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index 355c0bf3..3a2fc620 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_MERGESAMFILES { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bams) @@ -26,7 +15,8 @@ process PICARD_MERGESAMFILES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def bam_files = bams.sort() def avail_mem = 3 if (!task.memory) { @@ -39,20 +29,20 @@ process PICARD_MERGESAMFILES { picard \\ -Xmx${avail_mem}g \\ MergeSamFiles \\ - $options.args \\ + $args \\ ${'INPUT='+bam_files.join(' INPUT=')} \\ OUTPUT=${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } else { """ ln -s ${bam_files[0]} ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/sortsam/functions.nf b/modules/picard/sortsam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/sortsam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index 939df1c0..b264b927 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -1,23 +1,11 @@ - -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_SORTSAM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +16,8 @@ process PICARD_SORTSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard SortSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
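The versions blocks in these hunks swap the deleted helpers for a literal `"${task.process}"` key and a hard-coded tool name. The deleted `functions.nf` above shows what those helpers computed; the standalone Groovy sketch below (with an invented fully qualified process name) illustrates what each one returned for a module such as PICARD_SORTSAM, and what the refactor drops:

// Standalone Groovy illustration of the retired helpers; the process name string is invented.
def task_process = 'NFCORE_EXAMPLE:EXAMPLE:PICARD_SORTSAM'

assert task_process.tokenize(':')[-1] == 'PICARD_SORTSAM'                          // old getProcessName()
assert task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() == 'picard'   // old getSoftwareName()

// The refactor no longer trims anything: it writes the full task.process value as the key
// and hard-codes the tool name ('picard:') in each versions.yml heredoc.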
@@ -44,8 +33,8 @@ process PICARD_SORTSAM { --SORT_ORDER $sort_order cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard SortSam --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard SortSam --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/pirate/functions.nf b/modules/pirate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pirate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pirate/main.nf b/modules/pirate/main.nf index 01a950dd..3bbb1d64 100644 --- a/modules/pirate/main.nf +++ b/modules/pirate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PIRATE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pirate=1.0.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pirate%3A1.0.4--hdfd78af_1" - } else { - container "quay.io/biocontainers/pirate:1.0.4--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pirate%3A1.0.4--hdfd78af_1' : + 'quay.io/biocontainers/pirate:1.0.4--hdfd78af_1' }" input: tuple val(meta), path(gff) @@ -27,17 +16,18 @@ process PIRATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ PIRATE \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --input ./ \\ --output results/ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( PIRATE --version 2>&1) | sed 's/PIRATE //' ) + "${task.process}": + pirate: \$( echo \$( PIRATE --version 2>&1) | sed 's/PIRATE //' ) END_VERSIONS """ } diff --git a/modules/plasmidid/functions.nf b/modules/plasmidid/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plasmidid/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plasmidid/main.nf b/modules/plasmidid/main.nf index 1edc5eeb..290ae549 100644 --- a/modules/plasmidid/main.nf +++ b/modules/plasmidid/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLASMIDID { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::plasmidid=1.6.5' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/plasmidid:1.6.5--hdfd78af_0' - } else { - container 'quay.io/biocontainers/plasmidid:1.6.5--hdfd78af_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plasmidid:1.6.5--hdfd78af_0' : + 'quay.io/biocontainers/plasmidid:1.6.5--hdfd78af_0' }" input: tuple val(meta), path(scaffold) @@ -34,19 +23,20 @@ process PLASMIDID { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ plasmidID \\ -d $fasta \\ -s $prefix \\ -c $scaffold \\ - $options.args \\ + $args \\ -o . 
mv NO_GROUP/$prefix ./$prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(plasmidID --version 2>&1)) + "${task.process}": + plasmidid: \$(echo \$(plasmidID --version 2>&1)) END_VERSIONS """ } diff --git a/modules/plink/extract/functions.nf b/modules/plink/extract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plink/extract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plink/extract/main.nf b/modules/plink/extract/main.nf index 2e18500a..34b12fca 100644 --- a/modules/plink/extract/main.nf +++ b/modules/plink/extract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLINK_EXTRACT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::plink=1.90b6.21" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1" - } else { - container "quay.io/biocontainers/plink:1.90b6.21--h779adbc_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1' : + 'quay.io/biocontainers/plink:1.90b6.21--h779adbc_1' }" input: tuple val(meta), path(bed), path(bim), path(fam), path(variants) @@ -28,20 +17,21 @@ process PLINK_EXTRACT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if( "$bed" == "${prefix}.bed" ) error "Input and output names are the same, use the suffix option to disambiguate" """ plink \\ --bfile ${meta.id} \\ - $options.args \\ + $args \\ --extract $variants \\ --threads $task.cpus \\ --make-bed \\ --out $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(plink --version) | sed 's/^PLINK v//;s/64.*//') + "${task.process}": + plink: \$(echo \$(plink --version) | sed 's/^PLINK v//;s/64.*//') END_VERSIONS """ } diff --git a/modules/plink/vcf/functions.nf b/modules/plink/vcf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plink/vcf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf index a676b723..b6fd03d7 100644 --- a/modules/plink/vcf/main.nf +++ b/modules/plink/vcf/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLINK_VCF { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::plink=1.90b6.21" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1" - } else { - container "quay.io/biocontainers/plink:1.90b6.21--h779adbc_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1' : + 'quay.io/biocontainers/plink:1.90b6.21--h779adbc_1' }" input: tuple val(meta), path(vcf) @@ -29,18 +18,19 @@ process PLINK_VCF { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ plink \\ --vcf ${vcf} \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --out ${prefix} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) + "${task.process}": + plink: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) END_VERSIONS """ } diff --git a/modules/plink2/vcf/functions.nf b/modules/plink2/vcf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plink2/vcf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plink2/vcf/main.nf b/modules/plink2/vcf/main.nf index 869a5587..8101f7dd 100644 --- a/modules/plink2/vcf/main.nf +++ b/modules/plink2/vcf/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLINK2_VCF { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::plink2=2.00a2.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1" - } else { - container "quay.io/biocontainers/plink2:2.00a2.3--h712d239_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1' : + 'quay.io/biocontainers/plink2:2.00a2.3--h712d239_1' }" input: tuple val(meta), path(vcf) @@ -28,16 +17,17 @@ process PLINK2_VCF { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ plink2 \\ - $options.args \\ + $args \\ --vcf $vcf \\ --out ${prefix} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' ) + "${task.process}": + plink2: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' ) END_VERSIONS """ } diff --git a/modules/pmdtools/filter/functions.nf b/modules/pmdtools/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pmdtools/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pmdtools/filter/main.nf b/modules/pmdtools/filter/main.nf index 3e363a9c..301f9206 100644 --- a/modules/pmdtools/filter/main.nf +++ b/modules/pmdtools/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PMDTOOLS_FILTER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pmdtools=0.60" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pmdtools:0.60--hdfd78af_5" - } else { - container "quay.io/biocontainers/pmdtools:0.60--hdfd78af_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pmdtools:0.60--hdfd78af_5' : + 'quay.io/biocontainers/pmdtools:0.60--hdfd78af_5' }" input: tuple val(meta), path(bam), path (bai) @@ -28,8 +17,11 @@ process PMDTOOLS_FILTER { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' def split_cpus = Math.floor(task.cpus/2) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if ("$bam" == "${prefix}.bam") error "[pmdtools/filter] Input and output names are the same, use the suffix option to disambiguate!" 
//threshold and header flags activate filtering function of pmdtools """ @@ -37,22 +29,22 @@ process PMDTOOLS_FILTER { calmd \\ $bam \\ $reference \\ - $options.args \\ + $args \\ -@ ${split_cpus} \\ | pmdtools \\ --threshold $threshold \\ --header \\ - $options.args2 \\ + $args2 \\ | samtools \\ view \\ - $options.args3 \\ + $args3 \\ -Sb \\ - \\ -@ ${split_cpus} \\ -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": pmdtools: \$( pmdtools --version | cut -f2 -d ' ' | sed 's/v//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS diff --git a/modules/porechop/functions.nf b/modules/porechop/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/porechop/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/porechop/main.nf b/modules/porechop/main.nf index cf564938..2edc5c78 100644 --- a/modules/porechop/main.nf +++ b/modules/porechop/main.nf @@ -1,42 +1,32 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PORECHOP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::porechop=0.2.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/porechop:0.2.4--py39h7cff6ad_2" - } else { - container "quay.io/biocontainers/porechop:0.2.4--py38h8c62d01_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/porechop:0.2.4--py39h7cff6ad_2' : + 'quay.io/biocontainers/porechop:0.2.4--py38h8c62d01_2' }" input: tuple val(meta), path(reads) output: - tuple val(meta), path("*.fastq.gz") , emit: reads - path "versions.yml" , emit: versions + tuple val(meta), path("*.fastq.gz"), emit: reads + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ porechop \\ - -i ${reads} \\ - -t ${task.cpus} \\ - ${options.args} \\ + -i $reads \\ + -t $task.cpus \\ + $args \\ -o ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( porechop --version ) + "${task.process}": + porechop: \$( porechop --version ) END_VERSIONS """ } diff --git a/modules/preseq/lcextrap/functions.nf b/modules/preseq/lcextrap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/preseq/lcextrap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index f551a549..43f86cf8 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PRESEQ_LCEXTRAP { tag "$meta.id" label 'process_medium' label 'error_ignore' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::preseq=3.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/preseq:3.1.2--h06ef8b0_1" - } else { - container "quay.io/biocontainers/preseq:3.1.2--h06ef8b0_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/preseq:3.1.2--h06ef8b0_1' : + 'quay.io/biocontainers/preseq:3.1.2--h06ef8b0_1' }" input: tuple val(meta), path(bam) @@ -28,20 +17,21 @@ process PRESEQ_LCEXTRAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def paired_end = meta.single_end ? 
'' : '-pe' """ preseq \\ lc_extrap \\ - $options.args \\ + $args \\ $paired_end \\ -output ${prefix}.ccurve.txt \\ $bam cp .command.err ${prefix}.command.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') + "${task.process}": + preseq: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') END_VERSIONS """ } diff --git a/modules/prodigal/functions.nf b/modules/prodigal/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/prodigal/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/prodigal/main.nf b/modules/prodigal/main.nf index 572ffe92..b09da13c 100644 --- a/modules/prodigal/main.nf +++ b/modules/prodigal/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PRODIGAL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::prodigal=2.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/prodigal:2.6.3--h516909a_2" - } else { - container "quay.io/biocontainers/prodigal:2.6.3--h516909a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/prodigal:2.6.3--h516909a_2' : + 'quay.io/biocontainers/prodigal:2.6.3--h516909a_2' }" input: tuple val(meta), path(genome) @@ -30,10 +19,11 @@ process PRODIGAL { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ prodigal -i "${genome}" \\ - $options.args \\ + $args \\ -f $output_format \\ -d "${prefix}.fna" \\ -o "${prefix}.${output_format}" \\ @@ -41,8 +31,8 @@ process PRODIGAL { -s "${prefix}_all.txt" cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(prodigal -v 2>&1 | sed -n 's/Prodigal V\\(.*\\):.*/\\1/p') + "${task.process}": + prodigal: \$(prodigal -v 2>&1 | sed -n 's/Prodigal V\\(.*\\):.*/\\1/p') END_VERSIONS """ } diff --git a/modules/prokka/functions.nf b/modules/prokka/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/prokka/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/prokka/main.nf b/modules/prokka/main.nf index fb86078c..8fae6367 100644 --- a/modules/prokka/main.nf +++ b/modules/prokka/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PROKKA { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::prokka=1.14.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/prokka:1.14.6--pl526_0" - } else { - container "quay.io/biocontainers/prokka:1.14.6--pl526_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/prokka:1.14.6--pl526_0' : + 'quay.io/biocontainers/prokka:1.14.6--pl526_0' }" input: tuple val(meta), path(fasta) @@ -38,12 +28,13 @@ process PROKKA { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? 
"--prodigaltf ${prodigal_tf[0]}" : "" """ prokka \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --prefix $prefix \\ $proteins_opt \\ @@ -51,8 +42,8 @@ process PROKKA { $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(prokka --version 2>&1) | sed 's/^.*prokka //') + "${task.process}": + prokka: \$(echo \$(prokka --version 2>&1) | sed 's/^.*prokka //') END_VERSIONS """ } diff --git a/modules/pycoqc/functions.nf b/modules/pycoqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pycoqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pycoqc/main.nf b/modules/pycoqc/main.nf index 2c263d61..e966b31c 100644 --- a/modules/pycoqc/main.nf +++ b/modules/pycoqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYCOQC { tag "$summary" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::pycoqc=2.5.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pycoqc:2.5.2--py_0" - } else { - container "quay.io/biocontainers/pycoqc:2.5.2--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pycoqc:2.5.2--py_0' : + 'quay.io/biocontainers/pycoqc:2.5.2--py_0' }" input: path summary @@ -27,16 +16,17 @@ process PYCOQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ pycoQC \\ - $options.args \\ + $args \\ -f $summary \\ -o pycoqc.html \\ -j pycoqc.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pycoQC --version 2>&1 | sed 's/^.*pycoQC v//; s/ .*\$//') + "${task.process}": + pycoqc: \$(pycoQC --version 2>&1 | sed 's/^.*pycoQC v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/pydamage/analyze/functions.nf b/modules/pydamage/analyze/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pydamage/analyze/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish 
module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index 9cfb8a1a..c55616db 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYDAMAGE_ANALYZE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pydamage=0.62" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' : + 'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,17 +15,18 @@ process PYDAMAGE_ANALYZE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pydamage \\ analyze \\ - $options.args \\ + $args \\ -p $task.cpus \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') + "${task.process}": + pydamage: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') END_VERSIONS """ } diff --git a/modules/pydamage/filter/functions.nf b/modules/pydamage/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pydamage/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index 6cd7ae7a..2e0afac9 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYDAMAGE_FILTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pydamage=0.62" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' : + 'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }" input: tuple val(meta), path(csv) @@ -26,17 +15,18 @@ process PYDAMAGE_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pydamage \\ filter \\ - $options.args \\ + $args \\ $csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') + "${task.process}": + pydamage: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') END_VERSIONS """ } diff --git a/modules/qcat/functions.nf b/modules/qcat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qcat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qcat/main.nf b/modules/qcat/main.nf index b650fb8c..9f53f0cb 100644 --- a/modules/qcat/main.nf +++ b/modules/qcat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QCAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::qcat=1.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qcat:1.1.0--py_0" - } else { - container "quay.io/biocontainers/qcat:1.1.0--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qcat:1.1.0--py_0' : + 'quay.io/biocontainers/qcat:1.1.0--py_0' }" input: tuple val(meta), path(reads) @@ -27,7 +16,8 @@ process QCAT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ## Unzip fastq file ## qcat doesn't support zipped files yet @@ -47,8 +37,8 @@ process QCAT { gzip fastq/* cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(qcat --version 2>&1 | sed 's/^.*qcat //; s/ .*\$//') + "${task.process}": + qcat: \$(qcat --version 2>&1 | sed 's/^.*qcat //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/qualimap/bamqc/functions.nf b/modules/qualimap/bamqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qualimap/bamqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index d33f1e67..a47fde7e 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUALIMAP_BAMQC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::qualimap=2.2.2d" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1" - } else { - container "quay.io/biocontainers/qualimap:2.2.2d--1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1' : + 'quay.io/biocontainers/qualimap:2.2.2d--1' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process QUALIMAP_BAMQC { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def collect_pairs = meta.single_end ? 
'' : '--collect-overlap-pairs' def memory = task.memory.toGiga() + "G" @@ -47,7 +37,7 @@ process QUALIMAP_BAMQC { qualimap \\ --java-mem-size=$memory \\ bamqc \\ - $options.args \\ + $args \\ -bam $bam \\ $regions \\ -p $strandedness \\ @@ -56,8 +46,8 @@ process QUALIMAP_BAMQC { -nt $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') + "${task.process}": + qualimap: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ } diff --git a/modules/qualimap/rnaseq/functions.nf b/modules/qualimap/rnaseq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qualimap/rnaseq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qualimap/rnaseq/main.nf b/modules/qualimap/rnaseq/main.nf index 9492cec6..459f3da5 100644 --- a/modules/qualimap/rnaseq/main.nf +++ b/modules/qualimap/rnaseq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUALIMAP_RNASEQ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::qualimap=2.2.2d" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1" - } else { - container "quay.io/biocontainers/qualimap:2.2.2d--1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1' : + 'quay.io/biocontainers/qualimap:2.2.2d--1' }" input: tuple val(meta), path(bam) @@ -27,7 +16,8 @@ process QUALIMAP_RNASEQ { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def paired_end = meta.single_end ? 
'' : '-pe' def memory = task.memory.toGiga() + "G" @@ -44,7 +34,7 @@ process QUALIMAP_RNASEQ { qualimap \\ --java-mem-size=$memory \\ rnaseq \\ - $options.args \\ + $args \\ -bam $bam \\ -gtf $gtf \\ -p $strandedness \\ @@ -52,8 +42,8 @@ process QUALIMAP_RNASEQ { -outdir $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') + "${task.process}": + qualimap: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ } diff --git a/modules/quast/functions.nf b/modules/quast/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/quast/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/quast/main.nf b/modules/quast/main.nf index 072d649d..43caca3d 100644 --- a/modules/quast/main.nf +++ b/modules/quast/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUAST { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::quast=5.0.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/quast:5.0.2--py37pl526hb5aa323_2' - } else { - container 'quay.io/biocontainers/quast:5.0.2--py37pl526hb5aa323_2' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/quast:5.0.2--py37pl526hb5aa323_2' : + 'quay.io/biocontainers/quast:5.0.2--py37pl526hb5aa323_2' }" input: path consensus @@ -30,7 +19,8 @@ process QUAST { path "versions.yml" , emit: versions script: - prefix = options.suffix ?: software + def args = task.ext.args ?: '' + prefix = task.ext.suffix ?: 'quast' def features = use_gff ? "--features $gff" : '' def reference = use_fasta ? 
"-r $fasta" : '' """ @@ -39,12 +29,14 @@ process QUAST { $reference \\ $features \\ --threads $task.cpus \\ - $options.args \\ + $args \\ ${consensus.join(' ')} + ln -s ${prefix}/report.tsv + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(quast.py --version 2>&1 | sed 's/^.*QUAST v//; s/ .*\$//') + "${task.process}": + quast: \$(quast.py --version 2>&1 | sed 's/^.*QUAST v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/racon/functions.nf b/modules/racon/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/racon/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/racon/main.nf b/modules/racon/main.nf index 60a5061e..5936fac0 100644 --- a/modules/racon/main.nf +++ b/modules/racon/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RACON { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::racon=1.4.20" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/racon:1.4.20--h9a82719_1" - } else { - container "quay.io/biocontainers/racon:1.4.20--h9a82719_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/racon:1.4.20--h9a82719_1' : + 'quay.io/biocontainers/racon:1.4.20--h9a82719_1' }" input: tuple val(meta), path(reads), path(assembly), path(paf) @@ -26,20 +15,21 @@ process RACON { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - racon -t "${task.cpus}" \\ + racon -t "$task.cpus" \\ "${reads}" \\ "${paf}" \\ - $options.args \\ + $args \\ "${assembly}" > \\ ${prefix}_assembly_consensus.fasta gzip -n ${prefix}_assembly_consensus.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( racon --version 2>&1 | sed 's/^.*v//' ) + "${task.process}": + racon: \$( racon --version 2>&1 | sed 's/^.*v//' ) END_VERSIONS """ } diff --git a/modules/rapidnj/functions.nf b/modules/rapidnj/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rapidnj/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rapidnj/main.nf b/modules/rapidnj/main.nf index aa23b56e..04a08227 100644 --- a/modules/rapidnj/main.nf +++ b/modules/rapidnj/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.3.2' // No version information printed +def VERSION = '2.3.2' // Version information not provided by tool on CLI process RAPIDNJ { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::rapidnj=2.3.2 conda-forge::biopython=1.78" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0" - } else { - container "quay.io/biocontainers/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0' : + 'quay.io/biocontainers/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0' }" input: path alignment @@ -28,20 +17,21 @@ process RAPIDNJ { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ python \\ -c 'from Bio import SeqIO; SeqIO.convert("$alignment", "fasta", "alignment.sth", "stockholm")' rapidnj \\ alignment.sth \\ - $options.args \\ + $args \\ -i sth \\ -c $task.cpus \\ -x rapidnj_phylogeny.tre cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + rapidnj: $VERSION biopython: \$(python -c "import Bio; print(Bio.__version__)") END_VERSIONS """ diff --git a/modules/rasusa/functions.nf b/modules/rasusa/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rasusa/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - 
options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rasusa/main.nf b/modules/rasusa/main.nf index b9ba0b13..b43792ee 100644 --- a/modules/rasusa/main.nf +++ b/modules/rasusa/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RASUSA { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rasusa=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rasusa:0.3.0--h779adbc_1" - } else { - container "quay.io/biocontainers/rasusa:0.3.0--h779adbc_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rasusa:0.3.0--h779adbc_1' : + 'quay.io/biocontainers/rasusa:0.3.0--h779adbc_1' }" input: tuple val(meta), path(reads), val(genome_size) @@ -27,18 +16,19 @@ process RASUSA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def output = meta.single_end ? 
"--output ${prefix}.fastq.gz" : "--output ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz" """ rasusa \\ - $options.args \\ + $args \\ --coverage $depth_cutoff \\ --genome-size $genome_size \\ --input $reads \\ $output cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rasusa --version 2>&1 | sed -e "s/rasusa //g") + "${task.process}": + rasusa: \$(rasusa --version 2>&1 | sed -e "s/rasusa //g") END_VERSIONS """ } diff --git a/modules/raxmlng/functions.nf b/modules/raxmlng/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/raxmlng/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/raxmlng/main.nf b/modules/raxmlng/main.nf index f607b506..62b6c78a 100644 --- a/modules/raxmlng/main.nf +++ b/modules/raxmlng/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RAXMLNG { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::raxml-ng=1.0.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/raxml-ng:1.0.3--h32fcf60_0" - } else { - container "quay.io/biocontainers/raxml-ng:1.0.3--h32fcf60_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/raxml-ng:1.0.3--h32fcf60_0' : + 'quay.io/biocontainers/raxml-ng:1.0.3--h32fcf60_0' }" input: path alignment @@ -26,16 +15,17 @@ process RAXMLNG { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ raxml-ng \\ - $options.args \\ + $args \\ --msa $alignment \\ --threads $task.cpus \\ --prefix output cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(raxml-ng --version 2>&1) | sed 's/^.*RAxML-NG v. //; s/released.*\$//') + "${task.process}": + raxmlng: \$(echo \$(raxml-ng --version 2>&1) | sed 's/^.*RAxML-NG v. 
//; s/released.*\$//') END_VERSIONS """ } diff --git a/modules/rmarkdownnotebook/functions.nf b/modules/rmarkdownnotebook/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rmarkdownnotebook/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rmarkdownnotebook/main.nf b/modules/rmarkdownnotebook/main.nf index 4bded58c..9a7db505 100644 --- a/modules/rmarkdownnotebook/main.nf +++ b/modules/rmarkdownnotebook/main.nf @@ -1,29 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' -include { dump_params_yml; indent_code_block } from "./parametrize" - -params.options = [:] -options = initOptions(params.options) -params.parametrize = true -params.implicit_params = true -params.meta_params = true +include { dump_params_yml; indent_code_block } from "./parametrize" process RMARKDOWNNOTEBOOK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } //NB: You likely want to override this with a container containing all required //dependencies for your analysis. The container at least needs to contain the //yaml and rmarkdown R packages. conda (params.enable_conda ? "r-base=4.1.0 r-rmarkdown=2.9 r-yaml=2.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5%3A0e852a1e4063fdcbe3f254ac2c7469747a60e361-0" - } else { - container "quay.io/biocontainers/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5:0e852a1e4063fdcbe3f254ac2c7469747a60e361-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5%3A0e852a1e4063fdcbe3f254ac2c7469747a60e361-0' : + 'quay.io/biocontainers/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5:0e852a1e4063fdcbe3f254ac2c7469747a60e361-0' }" input: tuple val(meta), path(notebook) @@ -37,7 +24,11 @@ process RMARKDOWNNOTEBOOK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize + def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params + def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params // Dump parameters to yaml file. 
// Using a yaml file over using the CLI params because @@ -45,14 +36,14 @@ process RMARKDOWNNOTEBOOK { // * allows to pass nested maps instead of just single values def params_cmd = "" def render_cmd = "" - if (params.parametrize) { + if (parametrize) { nb_params = [:] - if (params.implicit_params) { + if (implicit_params) { nb_params["cpus"] = task.cpus nb_params["artifact_dir"] = "artifacts" nb_params["input_dir"] = "./" } - if (params.meta_params) { + if (meta_params) { nb_params["meta"] = meta } nb_params += parameters @@ -73,9 +64,9 @@ process RMARKDOWNNOTEBOOK { mkdir artifacts # Set parallelism for BLAS/MKL etc. to avoid over-booking of resources - export MKL_NUM_THREADS="${task.cpus}" - export OPENBLAS_NUM_THREADS="${task.cpus}" - export OMP_NUM_THREADS="${task.cpus}" + export MKL_NUM_THREADS="$task.cpus" + export OPENBLAS_NUM_THREADS="$task.cpus" + export OMP_NUM_THREADS="$task.cpus" # Work around https://github.com/rstudio/rmarkdown/issues/1508 # If the symbolic link is not replaced by a physical file @@ -90,7 +81,7 @@ process RMARKDOWNNOTEBOOK { EOF cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": rmarkdown: \$(Rscript -e "cat(paste(packageVersion('rmarkdown'), collapse='.'))") END_VERSIONS """ diff --git a/modules/roary/functions.nf b/modules/roary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/roary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/roary/main.nf b/modules/roary/main.nf index 9dc948fb..a05973eb 100644 --- a/modules/roary/main.nf +++ b/modules/roary/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ROARY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::roary=3.13.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/roary:3.13.0--pl526h516909a_0" - } else { - container "quay.io/biocontainers/roary:3.13.0--pl526h516909a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/roary:3.13.0--pl526h516909a_0' : + 'quay.io/biocontainers/roary:3.13.0--pl526h516909a_0' }" input: tuple val(meta), path(gff) @@ -27,17 +16,18 @@ process ROARY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ roary \\ - $options.args \\ + $args \\ -p $task.cpus \\ -f results/ \\ $gff cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( roary --version ) + "${task.process}": + roary: \$( roary --version ) END_VERSIONS """ } diff --git a/modules/rsem/calculateexpression/functions.nf b/modules/rsem/calculateexpression/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rsem/calculateexpression/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index f19392f7..659082fa 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEM_CALCULATEEXPRESSION { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } else { - container "quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' : + 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }" input: tuple val(meta), path(reads) @@ -34,7 +23,8 @@ process RSEM_CALCULATEEXPRESSION { tuple val(meta), path("${prefix}.transcript.bam"), optional:true, emit: bam_transcript script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -50,14 +40,14 @@ process RSEM_CALCULATEEXPRESSION { --temporary-folder ./tmp/ \\ $strandedness \\ $paired_end \\ - $options.args \\ + $args \\ $reads \\ \$INDEX \\ $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ diff --git a/modules/rsem/preparereference/functions.nf b/modules/rsem/preparereference/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rsem/preparereference/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index 7e671207..95597b74 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEM_PREPAREREFERENCE { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } else { - container "quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' : + 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }" input: path fasta, stageAs: "rsem/*" @@ -28,9 +17,11 @@ process RSEM_PREPAREREFERENCE { path "versions.yml" , emit: versions script: - def args = options.args.tokenize() - if (args.contains('--star')) { - args.removeIf { it.contains('--star') } + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args_list = args.tokenize() + if (args_list.contains('--star')) { + args_list.removeIf { it.contains('--star') } def memory = task.memory ? 
"--limitGenomeGenerateRAM ${task.memory.toBytes() - 100000000}" : '' """ STAR \\ @@ -40,18 +31,18 @@ process RSEM_PREPAREREFERENCE { --sjdbGTFfile $gtf \\ --runThreadN $task.cpus \\ $memory \\ - $options.args2 + $args2 rsem-prepare-reference \\ --gtf $gtf \\ --num-threads $task.cpus \\ - ${args.join(' ')} \\ + ${args_list.join(' ')} \\ $fasta \\ rsem/genome cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ @@ -60,13 +51,13 @@ process RSEM_PREPAREREFERENCE { rsem-prepare-reference \\ --gtf $gtf \\ --num-threads $task.cpus \\ - $options.args \\ + $args \\ $fasta \\ rsem/genome cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ diff --git a/modules/rseqc/bamstat/functions.nf b/modules/rseqc/bamstat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/bamstat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/bamstat/main.nf b/modules/rseqc/bamstat/main.nf index 64939add..d9d3fa36 100644 --- a/modules/rseqc/bamstat/main.nf +++ b/modules/rseqc/bamstat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_BAMSTAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -26,16 +15,17 @@ process RSEQC_BAMSTAT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bam_stat.py \\ -i $bam \\ - $options.args \\ + $args \\ > ${prefix}.bam_stat.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bam_stat.py --version | sed -e "s/bam_stat.py //g") + "${task.process}": + rseqc: \$(bam_stat.py --version | sed -e "s/bam_stat.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/inferexperiment/functions.nf b/modules/rseqc/inferexperiment/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/inferexperiment/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/inferexperiment/main.nf b/modules/rseqc/inferexperiment/main.nf index c5e94943..3b879cfb 100644 --- a/modules/rseqc/inferexperiment/main.nf +++ b/modules/rseqc/inferexperiment/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_INFEREXPERIMENT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -27,17 +16,18 @@ process RSEQC_INFEREXPERIMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ infer_experiment.py \\ -i $bam \\ -r $bed \\ - $options.args \\ + $args \\ > ${prefix}.infer_experiment.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(infer_experiment.py --version | sed -e "s/infer_experiment.py //g") + "${task.process}": + rseqc: \$(infer_experiment.py --version | sed -e "s/infer_experiment.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/innerdistance/functions.nf b/modules/rseqc/innerdistance/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/innerdistance/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/innerdistance/main.nf b/modules/rseqc/innerdistance/main.nf index 622cd5cd..88bec499 100644 --- a/modules/rseqc/innerdistance/main.nf +++ b/modules/rseqc/innerdistance/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_INNERDISTANCE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -31,27 +20,28 @@ process RSEQC_INNERDISTANCE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (!meta.single_end) { """ inner_distance.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args \\ + $args \\ > stdout.txt head -n 2 stdout.txt > ${prefix}.inner_distance_mean.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") + "${task.process}": + rseqc: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") END_VERSIONS """ } else { """ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") + "${task.process}": + rseqc: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/junctionannotation/functions.nf b/modules/rseqc/junctionannotation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/junctionannotation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index 1b75d915..b6949641 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_JUNCTIONANNOTATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -33,18 +22,19 @@ process RSEQC_JUNCTIONANNOTATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ junction_annotation.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args \\ + $args \\ 2> ${prefix}.junction_annotation.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(junction_annotation.py --version | sed -e "s/junction_annotation.py //g") + "${task.process}": + rseqc: \$(junction_annotation.py --version | sed -e "s/junction_annotation.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/junctionsaturation/functions.nf b/modules/rseqc/junctionsaturation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/junctionsaturation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/junctionsaturation/main.nf b/modules/rseqc/junctionsaturation/main.nf index fa435aea..58451d2e 100644 --- a/modules/rseqc/junctionsaturation/main.nf +++ b/modules/rseqc/junctionsaturation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_JUNCTIONSATURATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -28,17 +17,18 @@ process RSEQC_JUNCTIONSATURATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ junction_saturation.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(junction_saturation.py --version | sed -e "s/junction_saturation.py //g") + "${task.process}": + rseqc: \$(junction_saturation.py --version | sed -e "s/junction_saturation.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/readdistribution/functions.nf b/modules/rseqc/readdistribution/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/readdistribution/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/readdistribution/main.nf b/modules/rseqc/readdistribution/main.nf index 0c83fdf0..74af618d 100644 --- a/modules/rseqc/readdistribution/main.nf +++ b/modules/rseqc/readdistribution/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_READDISTRIBUTION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -27,7 +16,8 @@ process RSEQC_READDISTRIBUTION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ read_distribution.py \\ -i $bam \\ @@ -35,8 +25,8 @@ process RSEQC_READDISTRIBUTION { > ${prefix}.read_distribution.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(read_distribution.py --version | sed -e "s/read_distribution.py //g") + "${task.process}": + rseqc: \$(read_distribution.py --version | sed -e "s/read_distribution.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/readduplication/functions.nf b/modules/rseqc/readduplication/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/readduplication/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/readduplication/main.nf b/modules/rseqc/readduplication/main.nf index bee82682..80fcb150 100644 --- a/modules/rseqc/readduplication/main.nf +++ b/modules/rseqc/readduplication/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_READDUPLICATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -29,16 +18,17 @@ process RSEQC_READDUPLICATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ read_duplication.py \\ -i $bam \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(read_duplication.py --version | sed -e "s/read_duplication.py //g") + "${task.process}": + rseqc: \$(read_duplication.py --version | sed -e "s/read_duplication.py //g") END_VERSIONS """ } diff --git a/modules/salmon/index/functions.nf b/modules/salmon/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/salmon/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/salmon/index/main.nf b/modules/salmon/index/main.nf index c3fcef01..b0a2f973 100644 --- a/modules/salmon/index/main.nf +++ b/modules/salmon/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SALMON_INDEX { tag "$transcript_fasta" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::salmon=1.5.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0" - } else { - container "quay.io/biocontainers/salmon:1.5.2--h84f40af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0' : + 'quay.io/biocontainers/salmon:1.5.2--h84f40af_0' }" input: path genome_fasta @@ -27,6 +16,7 @@ process SALMON_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def get_decoy_ids = "grep '^>' $genome_fasta | cut -d ' ' -f 1 > decoys.txt" def gentrome = "gentrome.fa" if (genome_fasta.endsWith('.gz')) { @@ -43,11 +33,11 @@ process SALMON_INDEX { --threads $task.cpus \\ -t $gentrome \\ -d decoys.txt \\ - $options.args \\ + $args \\ -i salmon cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(salmon --version) | sed -e "s/salmon //g") + "${task.process}": + salmon: \$(echo \$(salmon --version) | sed -e "s/salmon //g") END_VERSIONS """ } diff --git a/modules/salmon/quant/functions.nf b/modules/salmon/quant/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/salmon/quant/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 7c2e0e17..9557fd24 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SALMON_QUANT { tag "$meta.id" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::salmon=1.5.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0" - } else { - container "quay.io/biocontainers/salmon:1.5.2--h84f40af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0' : + 'quay.io/biocontainers/salmon:1.5.2--h84f40af_0' }" input: tuple val(meta), path(reads) @@ -31,7 +20,8 @@ process SALMON_QUANT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = "--index $index" def input_reads = meta.single_end ? 
"-r $reads" : "-1 ${reads[0]} -2 ${reads[1]}" @@ -68,12 +58,12 @@ process SALMON_QUANT { --libType=$strandedness \\ $reference \\ $input_reads \\ - $options.args \\ + $args \\ -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(salmon --version) | sed -e "s/salmon //g") + "${task.process}": + salmon: \$(echo \$(salmon --version) | sed -e "s/salmon //g") END_VERSIONS """ } diff --git a/modules/samblaster/functions.nf b/modules/samblaster/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samblaster/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samblaster/main.nf b/modules/samblaster/main.nf index 4481d8cd..8445b0d0 100644 --- a/modules/samblaster/main.nf +++ b/modules/samblaster/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMBLASTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samblaster=0.1.26 bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0" - } else { - container "quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0' : + 'quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0' }" input: tuple val(meta), path(bam) @@ -26,16 +15,19 @@ process SAMBLASTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if( "$bam" == "${prefix}.bam" ) error "Input and output names are the same, use the suffix option to disambiguate" """ - samtools view -h $options.args2 $bam | \\ - samblaster $options.args | \\ - samtools view $options.args3 -Sb - >${prefix}.bam + samtools view -h $args2 $bam | \\ + samblaster $args | \\ + samtools view $args3 -Sb - >${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( samblaster -h 2>&1 | head -n 1 | sed 's/^samblaster: Version //' ) + "${task.process}": + samblaster: \$( samblaster -h 2>&1 | head -n 1 | sed 's/^samblaster: Version //' ) samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/samtools/ampliconclip/functions.nf b/modules/samtools/ampliconclip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/ampliconclip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 4cf98d3f..87d6ff8b 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_AMPLICONCLIP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -31,13 +20,14 @@ process SAMTOOLS_AMPLICONCLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def rejects = save_cliprejects ? "--rejects-file ${prefix}.cliprejects.bam" : "" def stats = save_clipstats ? 
"-f ${prefix}.clipstats.txt" : "" """ samtools \\ ampliconclip \\ - $options.args \\ + $args \\ $rejects \\ $stats \\ -b $bed \\ @@ -45,8 +35,8 @@ process SAMTOOLS_AMPLICONCLIP { $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/bam2fq/functions.nf b/modules/samtools/bam2fq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/bam2fq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/bam2fq/main.nf b/modules/samtools/bam2fq/main.nf index 48e3249c..20e83a14 100644 --- a/modules/samtools/bam2fq/main.nf +++ b/modules/samtools/bam2fq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_BAM2FQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(inputbam) @@ -27,13 +16,14 @@ process SAMTOOLS_BAM2FQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (split){ """ samtools \\ bam2fq \\ - $options.args \\ + $args \\ -@ $task.cpus \\ -1 ${prefix}_1.fq.gz \\ -2 ${prefix}_2.fq.gz \\ @@ -42,21 +32,21 @@ process SAMTOOLS_BAM2FQ { $inputbam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } else { """ samtools \\ bam2fq \\ - $options.args \\ + $args \\ -@ $task.cpus \\ $inputbam >${prefix}_interleaved.fq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/depth/functions.nf b/modules/samtools/depth/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/depth/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/depth/main.nf b/modules/samtools/depth/main.nf index 9c46b011..f336547f 100644 --- a/modules/samtools/depth/main.nf +++ b/modules/samtools/depth/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_DEPTH { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,18 +15,19 @@ process SAMTOOLS_DEPTH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ samtools \\ depth \\ - $options.args \\ + $args \\ -o ${prefix}.tsv \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/faidx/functions.nf b/modules/samtools/faidx/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/faidx/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index 80708084..c53373a9 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FAIDX { tag "$fasta" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: path fasta @@ -26,11 +15,12 @@ process SAMTOOLS_FAIDX { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ samtools faidx $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/fastq/functions.nf b/modules/samtools/fastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/fastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - 
-// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index fb7e3904..bdbf53e4 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FASTQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,18 +15,19 @@ process SAMTOOLS_FASTQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def endedness = meta.single_end ? 
"-0 ${prefix}.fastq.gz" : "-1 ${prefix}_1.fastq.gz -2 ${prefix}_2.fastq.gz" """ samtools fastq \\ - $options.args \\ + $args \\ --threads ${task.cpus-1} \\ $endedness \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/fixmate/functions.nf b/modules/samtools/fixmate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/fixmate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index af1cf829..180833f4 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FIXMATE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,20 +15,21 @@ process SAMTOOLS_FIXMATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if ("$bam" == "${prefix}.bam") error "Input and output names are the same, use the suffix option to disambiguate!" 
""" samtools \\ fixmate \\ - $options.args \\ + $args \\ --threads ${task.cpus-1} \\ $bam \\ ${prefix}.bam \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/flagstat/functions.nf b/modules/samtools/flagstat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/flagstat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index 072a135f..03721d0b 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FLAGSTAT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,11 +15,12 @@ process SAMTOOLS_FLAGSTAT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ samtools flagstat --threads ${task.cpus-1} $bam > ${bam}.flagstat cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/idxstats/functions.nf b/modules/samtools/idxstats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/idxstats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index fa0e7dc3..cd068679 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_IDXSTATS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,11 +15,12 @@ process SAMTOOLS_IDXSTATS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ samtools idxstats $bam > ${bam}.idxstats cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/index/functions.nf b/modules/samtools/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index d66e4513..b033e225 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_INDEX { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input) @@ -28,12 +17,13 @@ process SAMTOOLS_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - samtools index -@ ${task.cpus-1} $options.args $input + samtools index -@ ${task.cpus-1} $args $input cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/merge/functions.nf b/modules/samtools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index ab641bb9..8eeb64a2 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_MERGE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input_files) @@ -28,15 +17,16 @@ process SAMTOOLS_MERGE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def file_type = input_files[0].getExtension() def reference = fasta ? 
"--reference ${fasta}" : "" """ - samtools merge --threads ${task.cpus-1} $options.args ${reference} ${prefix}.${file_type} $input_files + samtools merge --threads ${task.cpus-1} $args ${reference} ${prefix}.${file_type} $input_files cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/mpileup/functions.nf b/modules/samtools/mpileup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/mpileup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 081682ed..5f6e2d49 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_MPILEUP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -27,16 +16,17 @@ process SAMTOOLS_MPILEUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ samtools mpileup \\ --fasta-ref $fasta \\ --output ${prefix}.mpileup \\ - $options.args \\ + $args \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/sort/functions.nf b/modules/samtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index f980b472..623f10b6 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_SORT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,12 +15,13 @@ process SAMTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - samtools sort $options.args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam + samtools sort $args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/stats/functions.nf b/modules/samtools/stats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/stats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index e0a2b50d..83c87002 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_STATS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input), path(input_index) @@ -27,13 +16,14 @@ process SAMTOOLS_STATS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def reference = fasta ? 
"--reference ${fasta}" : "" """ samtools stats --threads ${task.cpus-1} ${reference} ${input} > ${input}.stats cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/view/functions.nf b/modules/samtools/view/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/view/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index e5ff5546..464edd09 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_VIEW { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input) @@ -28,15 +17,16 @@ process SAMTOOLS_VIEW { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta ? 
"--reference ${fasta} -C" : "" def file_type = input.getExtension() """ - samtools view --threads ${task.cpus-1} ${reference} $options.args $input > ${prefix}.${file_type} + samtools view --threads ${task.cpus-1} ${reference} $args $input > ${prefix}.${file_type} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/scoary/functions.nf b/modules/scoary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/scoary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/scoary/main.nf b/modules/scoary/main.nf index 5720b4e5..8fed0119 100644 --- a/modules/scoary/main.nf +++ b/modules/scoary/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SCOARY { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::scoary=1.6.16" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/scoary:1.6.16--py_2" - } else { - container "quay.io/biocontainers/scoary:1.6.16--py_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/scoary:1.6.16--py_2' : + 'quay.io/biocontainers/scoary:1.6.16--py_2' }" input: tuple val(meta), path(genes), path(traits) @@ -27,19 +16,20 @@ process SCOARY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def newick_tree = tree ? 
"-n ${tree}" : "" """ scoary \\ - $options.args \\ + $args \\ --no-time \\ --threads $task.cpus \\ --traits $traits \\ --genes $genes cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( scoary --version 2>&1 ) + "${task.process}": + scoary: \$( scoary --version 2>&1 ) END_VERSIONS """ } diff --git a/modules/seacr/callpeak/functions.nf b/modules/seacr/callpeak/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seacr/callpeak/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index 97bf1c0b..328e4e6c 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.3' +def VERSION = '1.3' // Version information not provided by tool on CLI process SEACR_CALLPEAK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seacr=1.3 conda-forge::r-base=4.0.2 bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0" - } else { - container 'quay.io/biocontainers/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' : + 'quay.io/biocontainers/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' }" input: tuple val(meta), path(bedgraph), path(ctrlbedgraph) @@ -29,17 +18,19 @@ process SEACR_CALLPEAK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def function_switch = ctrlbedgraph ? 
"$ctrlbedgraph" : "$threshold" """ SEACR_1.3.sh \\ $bedgraph \\ $function_switch \\ - $options.args \\ + $args \\ $prefix + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + seacr: $VERSION bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") r-base: \$(echo \$(R --version 2>&1) | sed 's/^.*R version //; s/ .*\$//') END_VERSIONS diff --git a/modules/seqkit/split2/functions.nf b/modules/seqkit/split2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqkit/split2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index 80f55bb6..fc027793 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQKIT_SPLIT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::seqkit=0.16.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0" - } else { - container "quay.io/biocontainers/seqkit:0.16.1--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0' : + 'quay.io/biocontainers/seqkit:0.16.1--h9ee0642_0' }" input: tuple val(meta), path(reads) @@ -26,34 +15,35 @@ process SEQKIT_SPLIT2 { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if(meta.single_end){ """ seqkit \\ split2 \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -1 $reads \\ --out-dir $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqkit: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { """ seqkit \\ split2 \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -1 ${reads[0]} \\ -2 ${reads[1]} \\ --out-dir $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqkit: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqsero2/functions.nf b/modules/seqsero2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqsero2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqsero2/main.nf b/modules/seqsero2/main.nf index 3748a6e4..a8dd731e 100644 --- a/modules/seqsero2/main.nf +++ b/modules/seqsero2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQSERO2 { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seqsero2=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqsero2:1.2.1--py_0" - } else { - container "quay.io/biocontainers/seqsero2:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqsero2:1.2.1--py_0' : + 'quay.io/biocontainers/seqsero2:1.2.1--py_0' }" input: tuple val(meta), path(seqs) @@ -28,18 +17,19 @@ process SEQSERO2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ SeqSero2_package.py \\ - $options.args \\ + $args \\ -d results/ \\ -n $prefix \\ -p $task.cpus \\ -i $seqs cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( SeqSero2_package.py --version 2>&1) | sed 's/^.*SeqSero2_package.py //' ) + "${task.process}": + seqsero2: \$( echo \$( SeqSero2_package.py --version 2>&1) | sed 's/^.*SeqSero2_package.py //' ) END_VERSIONS """ } diff --git a/modules/seqtk/mergepe/functions.nf b/modules/seqtk/mergepe/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/mergepe/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/mergepe/main.nf b/modules/seqtk/mergepe/main.nf index fb8eb382..954bed5c 100644 --- a/modules/seqtk/mergepe/main.nf +++ b/modules/seqtk/mergepe/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_MERGEPE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: tuple val(meta), path(reads) @@ -26,27 +15,28 @@ process SEQTK_MERGEPE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ ln -s ${reads} ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { """ seqtk \\ mergepe \\ - $options.args \\ + $args \\ ${reads} \\ | gzip -n >> ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqtk/sample/functions.nf b/modules/seqtk/sample/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/sample/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/sample/main.nf b/modules/seqtk/sample/main.nf index 3b039fb9..83a107d0 100644 --- a/modules/seqtk/sample/main.nf +++ b/modules/seqtk/sample/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_SAMPLE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: tuple val(meta), path(reads) @@ -27,43 +16,44 @@ process SEQTK_SAMPLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ seqtk \\ sample \\ - $options.args \\ + $args \\ $reads \\ $sample_size \\ | gzip --no-name > ${prefix}.fastq.gz \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { - if (!(options.args ==~ /.*-s[0-9]+.*/)) { - options.args = options.args + " -s100" + if (!(args ==~ /.*-s[0-9]+.*/)) { + args += " -s100" } """ seqtk \\ sample \\ - $options.args \\ + $args \\ ${reads[0]} \\ $sample_size \\ | gzip --no-name > ${prefix}_1.fastq.gz \\ seqtk \\ sample \\ - $options.args \\ + $args \\ ${reads[1]} \\ $sample_size \\ | gzip --no-name > ${prefix}_2.fastq.gz \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqtk/subseq/functions.nf b/modules/seqtk/subseq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/subseq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/subseq/main.nf b/modules/seqtk/subseq/main.nf index df8783de..1d93b061 100644 --- a/modules/seqtk/subseq/main.nf +++ b/modules/seqtk/subseq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_SUBSEQ { tag '$sequences' label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: path sequences @@ -27,7 +16,8 @@ process SEQTK_SUBSEQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ?: '' + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ?: '' def ext = "fa" if ("$sequences" ==~ /.+\.fq|.+\.fq.gz|.+\.fastq|.+\.fastq.gz/) { ext = "fq" @@ -35,14 +25,14 @@ process SEQTK_SUBSEQ { """ seqtk \\ subseq \\ - $options.args \\ + $args \\ $sequences \\ $filter_list | \\ gzip --no-name > ${sequences}${prefix}.${ext}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/sequenzautils/bam2seqz/functions.nf b/modules/sequenzautils/bam2seqz/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/sequenzautils/bam2seqz/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path 
string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sequenzautils/bam2seqz/main.nf b/modules/sequenzautils/bam2seqz/main.nf index 61ca70c6..9082d426 100644 --- a/modules/sequenzautils/bam2seqz/main.nf +++ b/modules/sequenzautils/bam2seqz/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQUENZAUTILS_BAM2SEQZ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::sequenza-utils=3.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2" - } else { - container "quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2' : + 'quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2' }" input: tuple val(meta), path(normalbam), path(tumourbam) @@ -28,11 +17,12 @@ process SEQUENZAUTILS_BAM2SEQZ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ sequenza-utils \\ bam2seqz \\ - $options.args \\ + $args \\ -n $normalbam \\ -t $tumourbam \\ --fasta $fasta \\ @@ -40,8 +30,8 @@ process SEQUENZAUTILS_BAM2SEQZ { -o ${prefix}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') + "${task.process}": + sequenzautils: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/sequenzautils/gcwiggle/functions.nf b/modules/sequenzautils/gcwiggle/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/sequenzautils/gcwiggle/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sequenzautils/gcwiggle/main.nf b/modules/sequenzautils/gcwiggle/main.nf index c952bb70..43358c43 100644 --- a/modules/sequenzautils/gcwiggle/main.nf +++ b/modules/sequenzautils/gcwiggle/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQUENZAUTILS_GCWIGGLE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::sequenza-utils=3.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2" - } else { - container "quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2' : + 'quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process SEQUENZAUTILS_GCWIGGLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ sequenza-utils \\ gc_wiggle \\ - $options.args \\ + $args \\ --fasta $fasta \\ -o ${prefix}.wig.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') + "${task.process}": + sequenzautils: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqwish/induce/functions.nf b/modules/seqwish/induce/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqwish/induce/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index aaabce51..fb25a96e 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -1,24 +1,14 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.7.1' +def VERSION = '0.7.1' // Version information not provided by tool on CLI process SEQWISH_INDUCE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::seqwish=0.7.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqwish:0.7.1--h2e03b76_0" - } else { - container "quay.io/biocontainers/seqwish:0.7.1--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqwish:0.7.1--h2e03b76_0' : + 'quay.io/biocontainers/seqwish:0.7.1--h2e03b76_0' }" input: tuple val(meta), path(paf), path(fasta) @@ -27,20 +17,20 @@ process SEQWISH_INDUCE { tuple val(meta), path("*.gfa"), emit: gfa path "versions.yml" , emit: versions - script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ seqwish \\ --threads $task.cpus \\ --paf-alns=$paf \\ --seqs=$fasta \\ --gfa=${prefix}.gfa \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + seqwish: $VERSION END_VERSIONS """ } diff --git a/modules/shovill/functions.nf b/modules/shovill/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/shovill/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/shovill/main.nf b/modules/shovill/main.nf index 48425f9f..1a56df27 100644 --- a/modules/shovill/main.nf +++ b/modules/shovill/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SHOVILL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::shovill=1.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/shovill:1.1.0--0" - } else { - container "quay.io/biocontainers/shovill:1.1.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/shovill:1.1.0--0' : + 'quay.io/biocontainers/shovill:1.1.0--0' }" input: tuple val(meta), path(reads) @@ -30,20 +19,21 @@ process SHOVILL { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def memory = task.memory.toGiga() """ shovill \\ --R1 ${reads[0]} \\ --R2 ${reads[1]} \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --ram $memory \\ --outdir ./ \\ --force cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(shovill --version 2>&1) | sed 's/^.*shovill //') + "${task.process}": + shovill: \$(echo \$(shovill --version 2>&1) | sed 's/^.*shovill //') END_VERSIONS """ } diff --git a/modules/snpdists/functions.nf b/modules/snpdists/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpdists/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace 
and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpdists/main.nf b/modules/snpdists/main.nf index 506a922a..de79e89b 100644 --- a/modules/snpdists/main.nf +++ b/modules/snpdists/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SNPDISTS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::snp-dists=0.8.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snp-dists:0.8.2--h5bf99c6_0" - } else { - container "quay.io/biocontainers/snp-dists:0.8.2--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snp-dists:0.8.2--h5bf99c6_0' : + 'quay.io/biocontainers/snp-dists:0.8.2--h5bf99c6_0' }" input: tuple val(meta), path(alignment) @@ -26,15 +15,16 @@ process SNPDISTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ snp-dists \\ - $options.args \\ + $args \\ $alignment > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(snp-dists -v 2>&1 | sed 's/snp-dists //;') + "${task.process}": + snpdists: \$(snp-dists -v 2>&1 | sed 's/snp-dists //;') END_VERSIONS """ } diff --git a/modules/snpeff/functions.nf b/modules/snpeff/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpeff/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index 3a1f6a52..2cd023f6 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -1,26 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) -params.use_cache = false -params.snpeff_tag = "" - process SNPEFF { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::snpeff=5.0" : null) - if (params.use_cache) { - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1" - } else { - container "quay.io/biocontainers/snpeff:5.0--hdfd78af_1" - } + if (task.ext.use_cache) { + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" } else { - container "nfcore/snpeff:${params.snpeff_tag}" + container "nfcore/snpeff:${task.ext.snpeff_tag}" } input: @@ -34,26 +21,28 @@ process SNPEFF { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[snpEff] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - cache = params.use_cache ? "-dataDir \${PWD}/${snpeff_cache}" : "" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def dir_cache = task.ext.use_cache ? 
"-dataDir \${PWD}/${cache}" : "" """ - snpEff -Xmx${avail_mem}g \\ + snpEff \\ + -Xmx${avail_mem}g \\ $db \\ - $options.args \\ + $args \\ -csvStats ${prefix}.csv \\ - $cache \\ + $dir_cache \\ $vcf \\ > ${prefix}.ann.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(snpEff -version 2>&1) | cut -f 2 -d ' ') + "${task.process}": + snpeff: \$(echo \$(snpEff -version 2>&1) | cut -f 2 -d ' ') END_VERSIONS """ } diff --git a/modules/snpsites/functions.nf b/modules/snpsites/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpsites/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpsites/main.nf b/modules/snpsites/main.nf index 543ee01c..60e694ac 100644 --- a/modules/snpsites/main.nf +++ b/modules/snpsites/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SNPSITES { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::snp-sites=2.5.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snp-sites:2.5.1--hed695b0_0" - } else { - container "quay.io/biocontainers/snp-sites:2.5.1--hed695b0_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snp-sites:2.5.1--hed695b0_0' : + 'quay.io/biocontainers/snp-sites:2.5.1--hed695b0_0' }" input: path alignment @@ -27,10 +16,11 @@ process SNPSITES { env CONSTANT_SITES, emit: constant_sites_string script: + def args = task.ext.args ?: '' """ snp-sites \\ $alignment \\ - $options.args \\ + $args \\ > filtered_alignment.fas echo \$(snp-sites -C $alignment) > constant.sites.txt @@ -38,8 +28,8 @@ process SNPSITES { CONSTANT_SITES=\$(cat constant.sites.txt) cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(snp-sites -V 2>&1 | sed 's/snp-sites //') + "${task.process}": + snpsites: \$(snp-sites -V 2>&1 | sed 's/snp-sites //') END_VERSIONS """ } diff --git a/modules/sortmerna/functions.nf b/modules/sortmerna/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sortmerna/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index 9602bb53..83cd8092 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SORTMERNA { tag "$meta.id" label "process_high" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::sortmerna=4.3.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sortmerna:4.3.4--h9ee0642_0" - } else { - container "quay.io/biocontainers/sortmerna:4.3.4--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sortmerna:4.3.4--h9ee0642_0' : + 'quay.io/biocontainers/sortmerna:4.3.4--h9ee0642_0' }" input: tuple val(meta), path(reads) @@ -28,7 +17,8 @@ process SORTMERNA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ sortmerna \\ @@ -38,14 +28,14 @@ process SORTMERNA { --workdir . 
\\ --aligned rRNA_reads \\ --other non_rRNA_reads \\ - $options.args + $args mv non_rRNA_reads.fq.gz ${prefix}.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + "${task.process}": + sortmerna: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') END_VERSIONS """ } else { @@ -60,15 +50,15 @@ process SORTMERNA { --other non_rRNA_reads \\ --paired_in \\ --out2 \\ - $options.args + $args mv non_rRNA_reads_fwd.fq.gz ${prefix}_1.fastq.gz mv non_rRNA_reads_rev.fq.gz ${prefix}_2.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + "${task.process}": + sortmerna: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') END_VERSIONS """ } diff --git a/modules/spades/functions.nf b/modules/spades/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/spades/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/spades/main.nf b/modules/spades/main.nf index 836efbda..4663ec55 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SPADES { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::spades=3.15.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/spades:3.15.3--h95f258a_0" - } else { - container "quay.io/biocontainers/spades:3.15.3--h95f258a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/spades:3.15.3--h95f258a_0' : + 'quay.io/biocontainers/spades:3.15.3--h95f258a_0' }" input: tuple val(meta), path(illumina), path(pacbio), path(nanopore) @@ -32,7 +21,8 @@ process SPADES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def maxmem = task.memory.toGiga() def illumina_reads = illumina ? ( meta.single_end ? "-s $illumina" : "-1 ${illumina[0]} -2 ${illumina[1]}" ) : "" def pacbio_reads = pacbio ? "--pacbio $pacbio" : "" @@ -40,7 +30,7 @@ process SPADES { def custom_hmms = hmm ? 
"--custom-hmms $hmm" : "" """ spades.py \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --memory $maxmem \\ $custom_hmms \\ @@ -73,8 +63,8 @@ process SPADES { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(spades.py --version 2>&1 | sed 's/^.*SPAdes genome assembler v//; s/ .*\$//') + "${task.process}": + spades: \$(spades.py --version 2>&1 | sed 's/^.*SPAdes genome assembler v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/spatyper/functions.nf b/modules/spatyper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/spatyper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/spatyper/main.nf b/modules/spatyper/main.nf index 34207dbf..d7c75ba6 100644 --- a/modules/spatyper/main.nf +++ b/modules/spatyper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SPATYPER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::spatyper=0.3.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/spatyper%3A0.3.3--pyhdfd78af_3" - } else { - container "quay.io/biocontainers/spatyper:0.3.3--pyhdfd78af_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/spatyper%3A0.3.3--pyhdfd78af_3' : + 'quay.io/biocontainers/spatyper:0.3.3--pyhdfd78af_3' }" input: tuple val(meta), path(fasta) @@ -28,18 +17,19 @@ process SPATYPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input_args = repeats && repeat_order ? 
"-r ${repeats} -o ${repeat_order}" : "" """ spaTyper \\ - $options.args \\ + $args \\ $input_args \\ --fasta $fasta \\ --output ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(spaTyper --version 2>&1) | sed 's/^.*spaTyper //' ) + "${task.process}": + spatyper: \$( echo \$(spaTyper --version 2>&1) | sed 's/^.*spaTyper //' ) END_VERSIONS """ } diff --git a/modules/sratools/fasterqdump/functions.nf b/modules/sratools/fasterqdump/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sratools/fasterqdump/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sratools/fasterqdump/main.nf b/modules/sratools/fasterqdump/main.nf index 08ef9045..73e3673d 100644 --- a/modules/sratools/fasterqdump/main.nf +++ b/modules/sratools/fasterqdump/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SRATOOLS_FASTERQDUMP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0 conda-forge::pigz=2.6' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' - } else { - container 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' : + 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' }" input: tuple val(meta), path(sra) @@ -26,6 +15,8 @@ process SRATOOLS_FASTERQDUMP { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' def config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" // Paired-end data extracted by fasterq-dump (--split-3 the default) always creates // *_1.fastq *_2.fastq files but sometimes also an additional *.fastq file @@ -39,19 +30,19 @@ process SRATOOLS_FASTERQDUMP { fi fasterq-dump \\ - ${options.args} \\ + $args \\ --threads $task.cpus \\ ${sra.name} pigz \\ - ${options.args2} \\ + $args2 \\ --no-name \\ --processes $task.cpus \\ *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fasterq-dump --version 2>&1 | grep -Eo '[0-9.]+') + "${task.process}": + sratools: \$(fasterq-dump --version 2>&1 | grep -Eo '[0-9.]+') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/sratools/prefetch/functions.nf b/modules/sratools/prefetch/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sratools/prefetch/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def 
getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sratools/prefetch/main.nf b/modules/sratools/prefetch/main.nf index 207d1e10..1e1eb802 100644 --- a/modules/sratools/prefetch/main.nf +++ b/modules/sratools/prefetch/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SRATOOLS_PREFETCH { tag "$id" label 'process_low' label 'error_retry' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5262h314213e_0' - } else { - container 'quay.io/biocontainers/sra-tools:2.11.0--pl5262h314213e_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5262h314213e_0' : + 'quay.io/biocontainers/sra-tools:2.11.0--pl5262h314213e_0' }" input: tuple val(meta), val(id) @@ -27,6 +16,7 @@ process SRATOOLS_PREFETCH { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" """ eval "\$(vdb-config -o n NCBI_SETTINGS | sed 's/[" ]//g')" @@ -36,15 +26,15 @@ process SRATOOLS_PREFETCH { fi prefetch \\ - $options.args \\ + $args \\ --progress \\ $id vdb-validate $id cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(prefetch --version 2>&1 | grep -Eo '[0-9.]+') + "${task.process}": + sratools: \$(prefetch --version 2>&1 | grep -Eo '[0-9.]+') END_VERSIONS """ } diff --git a/modules/staphopiasccmec/functions.nf b/modules/staphopiasccmec/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/staphopiasccmec/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/staphopiasccmec/main.nf b/modules/staphopiasccmec/main.nf index 08def401..f33634ae 100644 --- a/modules/staphopiasccmec/main.nf +++ b/modules/staphopiasccmec/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAPHOPIASCCMEC { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::staphopia-sccmec=1.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/staphopia-sccmec:1.0.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/staphopia-sccmec:1.0.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/staphopia-sccmec:1.0.0--hdfd78af_0' : + 'quay.io/biocontainers/staphopia-sccmec:1.0.0--hdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -26,13 +15,14 @@ process STAPHOPIASCCMEC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - staphopia-sccmec --assembly $fasta $options.args > ${prefix}.tsv + staphopia-sccmec --assembly $fasta $args > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(staphopia-sccmec --version 2>&1 | sed 's/^.*staphopia-sccmec //') + "${task.process}": + staphopiasccmec: \$(staphopia-sccmec --version 2>&1 | sed 's/^.*staphopia-sccmec //') END_VERSIONS """ } diff --git a/modules/star/align/functions.nf b/modules/star/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/star/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index e0ccba8c..46023d3e 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -1,28 +1,20 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAR_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Note: 2.7X indices incompatible with AWS iGenomes. conda (params.enable_conda ? 'bioconda::star=2.7.9a' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' - } else { - container 'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' : + 'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' }" input: tuple val(meta), path(reads) path index path gtf + val star_ignore_sjdbgtf + val seq_platform + val seq_center output: tuple val(meta), path('*d.out.bam') , emit: bam @@ -39,12 +31,13 @@ process STAR_ALIGN { tuple val(meta), path('*.out.junction') , optional:true, emit: junction script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def ignore_gtf = params.star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" - def seq_platform = params.seq_platform ? "'PL:$params.seq_platform'" : "" - def seq_center = params.seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$params.seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " - def out_sam_type = (options.args.contains('--outSAMtype')) ? '' : '--outSAMtype BAM Unsorted' - def mv_unsorted_bam = (options.args.contains('--outSAMtype BAM Unsorted SortedByCoordinate')) ? "mv ${prefix}.Aligned.out.bam ${prefix}.Aligned.unsort.out.bam" : '' + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def ignore_gtf = star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" + def seq_platform = seq_platform ? "'PL:$seq_platform'" : "" + def seq_center = seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " + def out_sam_type = (args.contains('--outSAMtype')) ? '' : '--outSAMtype BAM Unsorted' + def mv_unsorted_bam = (args.contains('--outSAMtype BAM Unsorted SortedByCoordinate')) ? 
"mv ${prefix}.Aligned.out.bam ${prefix}.Aligned.unsort.out.bam" : '' """ STAR \\ --genomeDir $index \\ @@ -54,7 +47,7 @@ process STAR_ALIGN { $out_sam_type \\ $ignore_gtf \\ $seq_center \\ - $options.args + $args $mv_unsorted_bam @@ -68,8 +61,8 @@ process STAR_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ } diff --git a/modules/star/genomegenerate/functions.nf b/modules/star/genomegenerate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/star/genomegenerate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/star/genomegenerate/main.nf b/modules/star/genomegenerate/main.nf index c932fafe..ad32c0dd 100644 --- a/modules/star/genomegenerate/main.nf +++ b/modules/star/genomegenerate/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAR_GENOMEGENERATE { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } // Note: 2.7X indices incompatible with AWS iGenomes. conda (params.enable_conda ? "bioconda::star=2.7.9a bioconda::samtools=1.13 conda-forge::gawk=5.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0" - } else { - container "quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0' : + 'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0' }" input: path fasta @@ -28,9 +17,10 @@ process STAR_GENOMEGENERATE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args_list = args.tokenize() def memory = task.memory ? 
"--limitGenomeGenerateRAM ${task.memory.toBytes() - 100000000}" : '' - def args = options.args.tokenize() - if (args.contains('--genomeSAindexNbases')) { + if (args_list.contains('--genomeSAindexNbases')) { """ mkdir star STAR \\ @@ -40,11 +30,11 @@ process STAR_GENOMEGENERATE { --sjdbGTFfile $gtf \\ --runThreadN $task.cpus \\ $memory \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//') END_VERSIONS @@ -63,11 +53,11 @@ process STAR_GENOMEGENERATE { --runThreadN $task.cpus \\ --genomeSAindexNbases \$NUM_BASES \\ $memory \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//') END_VERSIONS diff --git a/modules/strelka/germline/functions.nf b/modules/strelka/germline/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/strelka/germline/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index 5e913c40..e991db67 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRELKA_GERMLINE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::strelka=2.9.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/strelka:2.9.10--0" - } else { - container "quay.io/biocontainers/strelka:2.9.10--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/strelka:2.9.10--0' : + 'quay.io/biocontainers/strelka:2.9.10--0' }" input: tuple val(meta), path(input), path(input_index) @@ -25,7 +14,6 @@ process STRELKA_GERMLINE { path target_bed path target_bed_tbi - output: tuple val(meta), path("*variants.vcf.gz") , emit: vcf tuple val(meta), path("*variants.vcf.gz.tbi"), emit: vcf_tbi @@ -34,14 +22,15 @@ process STRELKA_GERMLINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def regions = target_bed ? 
"--exome --callRegions ${target_bed}" : "" """ configureStrelkaGermlineWorkflow.py \\ --bam $input \\ --referenceFasta $fasta \\ $regions \\ - $options.args \\ + $args \\ --runDir strelka python strelka/runWorkflow.py -m local -j $task.cpus @@ -51,8 +40,8 @@ process STRELKA_GERMLINE { mv strelka/results/variants/variants.vcf.gz.tbi ${prefix}.variants.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configureStrelkaGermlineWorkflow.py --version ) + "${task.process}": + strelka: \$( configureStrelkaGermlineWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/strelka/somatic/functions.nf b/modules/strelka/somatic/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/strelka/somatic/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf index 633b0a2c..fa138633 100644 --- a/modules/strelka/somatic/main.nf +++ b/modules/strelka/somatic/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRELKA_SOMATIC { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::strelka=2.9.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1" - } else { - container "quay.io/biocontainers/strelka:2.9.10--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1' : + 'quay.io/biocontainers/strelka:2.9.10--h9ee0642_1' }" input: tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor), path(manta_candidate_small_indels), path(manta_candidate_small_indels_tbi) @@ -33,7 +22,8 @@ process STRELKA_SOMATIC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def options_target_bed = target_bed ? "--exome --callRegions ${target_bed}" : "" def options_manta = manta_candidate_small_indels ? 
"--indelCandidates ${manta_candidate_small_indels}" : "" """ @@ -43,7 +33,7 @@ process STRELKA_SOMATIC { --referenceFasta $fasta \\ $options_target_bed \\ $options_manta \\ - $options.args \\ + $args \\ --runDir strelka python strelka/runWorkflow.py -m local -j $task.cpus @@ -54,8 +44,8 @@ process STRELKA_SOMATIC { mv strelka/results/variants/somatic.snvs.vcf.gz.tbi ${prefix}.somatic_snvs.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configureStrelkaSomaticWorkflow.py --version ) + "${task.process}": + strelka: \$( configureStrelkaSomaticWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/stringtie/merge/functions.nf b/modules/stringtie/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/stringtie/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/stringtie/merge/main.nf b/modules/stringtie/merge/main.nf index 371533bb..756dc6ec 100644 --- a/modules/stringtie/merge/main.nf +++ b/modules/stringtie/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRINGTIE_MERGE { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } // Note: 2.7X indices incompatible with AWS iGenomes. conda (params.enable_conda ? "bioconda::stringtie=2.1.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0" - } else { - container "quay.io/biocontainers/stringtie:2.1.7--h978d192_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0' : + 'quay.io/biocontainers/stringtie:2.1.7--h978d192_0' }" input: path stringtie_gtf @@ -27,6 +16,7 @@ process STRINGTIE_MERGE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ stringtie \\ --merge $stringtie_gtf \\ @@ -34,8 +24,8 @@ process STRINGTIE_MERGE { -o stringtie.merged.gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(stringtie --version 2>&1) + "${task.process}": + stringtie: \$(stringtie --version 2>&1) END_VERSIONS """ } diff --git a/modules/stringtie/stringtie/functions.nf b/modules/stringtie/stringtie/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/stringtie/stringtie/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } 
// Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index 3579e47c..4367a84d 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRINGTIE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::stringtie=2.1.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0" - } else { - container "quay.io/biocontainers/stringtie:2.1.7--h978d192_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0' : + 'quay.io/biocontainers/stringtie:2.1.7--h978d192_0' }" input: tuple val(meta), path(bam) @@ -30,7 +19,8 @@ process STRINGTIE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -48,11 +38,11 @@ process STRINGTIE { -C ${prefix}.coverage.gtf \\ -b ${prefix}.ballgown \\ -p $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(stringtie --version 2>&1) + "${task.process}": + stringtie: \$(stringtie --version 2>&1) END_VERSIONS """ } diff --git a/modules/subread/featurecounts/functions.nf b/modules/subread/featurecounts/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/subread/featurecounts/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/subread/featurecounts/main.nf b/modules/subread/featurecounts/main.nf index 0a0285db..43a7f8cd 100644 --- a/modules/subread/featurecounts/main.nf +++ b/modules/subread/featurecounts/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SUBREAD_FEATURECOUNTS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::subread=2.0.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/subread:2.0.1--hed695b0_0" - } else { - container "quay.io/biocontainers/subread:2.0.1--hed695b0_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/subread:2.0.1--hed695b0_0' : + 'quay.io/biocontainers/subread:2.0.1--hed695b0_0' }" input: tuple val(meta), path(bams), path(annotation) @@ -27,7 +16,8 @@ process SUBREAD_FEATURECOUNTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def paired_end = meta.single_end ? 
'' : '-p' def strandedness = 0 @@ -38,7 +28,7 @@ process SUBREAD_FEATURECOUNTS { } """ featureCounts \\ - $options.args \\ + $args \\ $paired_end \\ -T $task.cpus \\ -a $annotation \\ @@ -47,8 +37,8 @@ process SUBREAD_FEATURECOUNTS { ${bams.join(' ')} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") + "${task.process}": + subread: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") END_VERSIONS """ } diff --git a/modules/tabix/bgzip/functions.nf b/modules/tabix/bgzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/bgzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index 43726f17..13f9a942 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_BGZIP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(input) @@ -26,13 +15,14 @@ process TABIX_BGZIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - bgzip -c $options.args $input > ${prefix}.${input.getExtension()}.gz + bgzip -c $args $input > ${prefix}.${input.getExtension()}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tabix/bgziptabix/functions.nf b/modules/tabix/bgziptabix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/bgziptabix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index e44a7226..9a633d2e 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_BGZIPTABIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(input) @@ -26,14 +15,16 @@ process TABIX_BGZIPTABIX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - bgzip -c $options.args $input > ${prefix}.gz - tabix $options.args2 ${prefix}.gz + bgzip -c $args $input > ${prefix}.gz + tabix $args2 ${prefix}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tabix/tabix/functions.nf b/modules/tabix/tabix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/tabix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index 1574c0b5..c721a554 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_TABIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(tab) @@ -26,12 +15,13 @@ process TABIX_TABIX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - tabix $options.args $tab + tabix $args $tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tbprofiler/profile/functions.nf b/modules/tbprofiler/profile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tbprofiler/profile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// 
-// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tbprofiler/profile/main.nf b/modules/tbprofiler/profile/main.nf index afd78b05..3f6bffc3 100644 --- a/modules/tbprofiler/profile/main.nf +++ b/modules/tbprofiler/profile/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TBPROFILER_PROFILE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::tb-profiler=3.0.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tb-profiler:3.0.8--pypyh5e36f6f_0" - } else { - container "quay.io/biocontainers/tb-profiler:3.0.8--pypyh5e36f6f_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tb-profiler:3.0.8--pypyh5e36f6f_0' : + 'quay.io/biocontainers/tb-profiler:3.0.8--pypyh5e36f6f_0' }" input: tuple val(meta), path(reads) @@ -30,19 +19,20 @@ process TBPROFILER_PROFILE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input_reads = meta.single_end ? 
"--read1 $reads" : "--read1 ${reads[0]} --read2 ${reads[1]}" """ tb-profiler \\ profile \\ - $options.args \\ + $args \\ --prefix ${prefix} \\ --threads $task.cpus \\ $input_reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(tb-profiler --version 2>&1) | sed 's/TBProfiler version //') + "${task.process}": + tbprofiler: \$( echo \$(tb-profiler --version 2>&1) | sed 's/TBProfiler version //') END_VERSIONS """ } diff --git a/modules/tiddit/cov/functions.nf b/modules/tiddit/cov/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tiddit/cov/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tiddit/cov/main.nf b/modules/tiddit/cov/main.nf index a3a8a171..e9bb9b5d 100644 --- a/modules/tiddit/cov/main.nf +++ b/modules/tiddit/cov/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TIDDIT_COV { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::tiddit=2.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0" - } else { - container "quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0' : + 'quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0' }" input: tuple val(meta), path(bam) @@ -25,25 +14,23 @@ process TIDDIT_COV { output: tuple val(meta), path("*.tab"), optional: true, emit: cov tuple val(meta), path("*.wig"), optional: true, emit: wig - path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta ? 
"--ref $fasta" : "" """ tiddit \\ --cov \\ -o $prefix \\ - $options.args \\ + $args \\ --bam $bam \\ $reference cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + "${task.process}": + tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tiddit/sv/functions.nf b/modules/tiddit/sv/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tiddit/sv/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index e262221a..83a46f82 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TIDDIT_SV { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::tiddit=2.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0" - } else { - container "quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0' : + 'quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0' }" input: tuple val(meta), path(bam) @@ -30,19 +19,20 @@ process TIDDIT_SV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta == "dummy_file.txt" ? 
"--ref $fasta" : "" """ tiddit \\ --sv \\ - $options.args \\ + $args \\ --bam $bam \\ $reference \\ -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + "${task.process}": + tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/trimgalore/functions.nf b/modules/trimgalore/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/trimgalore/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 8e77f1f7..86761ad8 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TRIMGALORE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::trim-galore=0.6.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/trim-galore:0.6.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/trim-galore:0.6.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/trim-galore:0.6.7--hdfd78af_0' : + 'quay.io/biocontainers/trim-galore:0.6.7--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -30,6 +19,7 @@ process TRIMGALORE { tuple val(meta), path("*.zip") , emit: zip optional true script: + def args = task.ext.args ?: '' // Calculate number of --cores for TrimGalore based on value of task.cpus // See: https://github.com/FelixKrueger/TrimGalore/blob/master/Changelog.md#version-060-release-on-1-mar-2019 // See: https://github.com/nf-core/atacseq/pull/65 @@ -48,20 +38,20 @@ process TRIMGALORE { def tpc_r2 = params.three_prime_clip_r2 > 0 ? "--three_prime_clip_r2 ${params.three_prime_clip_r2}" : '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz trim_galore \\ - $options.args \\ + $args \\ --cores $cores \\ --gzip \\ $c_r1 \\ $tpc_r1 \\ ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') + "${task.process}": + trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') cutadapt: \$(cutadapt --version) END_VERSIONS """ @@ -70,7 +60,7 @@ process TRIMGALORE { [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! 
-f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz trim_galore \\ - $options.args \\ + $args \\ --cores $cores \\ --paired \\ --gzip \\ @@ -81,8 +71,8 @@ process TRIMGALORE { ${prefix}_1.fastq.gz \\ ${prefix}_2.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') + "${task.process}": + trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') cutadapt: \$(cutadapt --version) END_VERSIONS """ diff --git a/modules/ucsc/bed12tobigbed/functions.nf b/modules/ucsc/bed12tobigbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/bed12tobigbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bed12tobigbed/main.nf b/modules/ucsc/bed12tobigbed/main.nf index 81f39a6f..937eabd6 100644 --- a/modules/ucsc/bed12tobigbed/main.nf +++ b/modules/ucsc/bed12tobigbed/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BED12TOBIGBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-bedtobigbed=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedtobigbed:377--h446ed27_1" - } else { - container "quay.io/biocontainers/ucsc-bedtobigbed:377--h446ed27_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedtobigbed:377--h446ed27_1' : + 'quay.io/biocontainers/ucsc-bedtobigbed:377--h446ed27_1' }" input: tuple val(meta), path(bed) @@ -29,7 +18,8 @@ process UCSC_BED12TOBIGBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedToBigBed \\ $bed \\ @@ -37,8 +27,8 @@ process UCSC_BED12TOBIGBED { ${prefix}.bigBed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bedclip/functions.nf b/modules/ucsc/bedclip/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/bedclip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bedclip/main.nf b/modules/ucsc/bedclip/main.nf index 5fbc2b3b..1d46342c 100755 --- a/modules/ucsc/bedclip/main.nf +++ b/modules/ucsc/bedclip/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BEDCLIP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-bedclip=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedclip:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-bedclip:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedclip:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-bedclip:377--h0b8a92a_2' }" input: tuple val(meta), path(bedgraph) @@ -29,7 +18,8 @@ process UCSC_BEDCLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedClip \\ $bedgraph \\ @@ -37,8 +27,8 @@ process UCSC_BEDCLIP { ${prefix}.bedGraph cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bedgraphtobigwig/functions.nf b/modules/ucsc/bedgraphtobigwig/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/bedgraphtobigwig/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bedgraphtobigwig/main.nf b/modules/ucsc/bedgraphtobigwig/main.nf index f55cdb07..e18b41bc 100644 --- a/modules/ucsc/bedgraphtobigwig/main.nf +++ b/modules/ucsc/bedgraphtobigwig/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BEDGRAPHTOBIGWIG { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-bedgraphtobigwig=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedgraphtobigwig:377--h446ed27_1" - } else { - container "quay.io/biocontainers/ucsc-bedgraphtobigwig:377--h446ed27_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedgraphtobigwig:377--h446ed27_1' : + 'quay.io/biocontainers/ucsc-bedgraphtobigwig:377--h446ed27_1' }" input: tuple val(meta), path(bedgraph) @@ -29,7 +18,8 @@ process UCSC_BEDGRAPHTOBIGWIG { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedGraphToBigWig \\ $bedgraph \\ @@ -37,8 +27,8 @@ process UCSC_BEDGRAPHTOBIGWIG { ${prefix}.bigWig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bigwigaverageoverbed/functions.nf b/modules/ucsc/bigwigaverageoverbed/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/bigwigaverageoverbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bigwigaverageoverbed/main.nf b/modules/ucsc/bigwigaverageoverbed/main.nf index 72491443..8c6f1178 100644 --- a/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/modules/ucsc/bigwigaverageoverbed/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BIGWIGAVERAGEOVERBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-bigwigaverageoverbed=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bigwigaverageoverbed:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-bigwigaverageoverbed:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bigwigaverageoverbed:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-bigwigaverageoverbed:377--h0b8a92a_2' }" input: tuple val(meta), path(bed) @@ -29,18 +18,19 @@ process UCSC_BIGWIGAVERAGEOVERBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + // BUG: bigWigAverageOverBed cannot handle ensembl seqlevels style """ - # there is a bug that bigWigAverageOverBed can not handle ensembl seqlevels style. 
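    # The Ensembl/UCSC seqlevel caveat noted above is left to the caller; a rough,
    # purely illustrative pre-processing step (not performed by this module, filenames
    # assumed) could prefix bare Ensembl-style names with "chr" before running the tool:
    #   awk 'BEGIN{FS=OFS="\t"} {\$1 = (\$1 ~ /^chr/) ? \$1 : "chr"\$1; print}' regions.bed > regions.ucsc.bed
    # Contigs whose names differ outright (e.g. MT vs chrM) would still need an explicit mapping.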
bigWigAverageOverBed \\ - $options.args \\ + $args \\ $bigwig \\ $bed \\ ${prefix}.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/liftover/functions.nf b/modules/ucsc/liftover/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/liftover/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/liftover/main.nf b/modules/ucsc/liftover/main.nf index 3739a1e5..1c667262 100644 --- a/modules/ucsc/liftover/main.nf +++ b/modules/ucsc/liftover/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_LIFTOVER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-liftover=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-liftover:377--h0b8a92a_3" - } else { - container "quay.io/biocontainers/ucsc-liftover:377--h0b8a92a_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-liftover:377--h0b8a92a_3' : + 'quay.io/biocontainers/ucsc-liftover:377--h0b8a92a_3' }" input: tuple val(meta), path(bed) @@ -30,19 +19,20 @@ process UCSC_LIFTOVER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ liftOver \\ - $options.args \ + $args \ $bed \\ $chain \\ ${prefix}.lifted.bed \\ ${prefix}.unlifted.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/wigtobigwig/functions.nf b/modules/ucsc/wigtobigwig/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/wigtobigwig/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/wigtobigwig/main.nf b/modules/ucsc/wigtobigwig/main.nf index d03a2c4a..4c596c9a 100644 --- a/modules/ucsc/wigtobigwig/main.nf +++ b/modules/ucsc/wigtobigwig/main.nf @@ -1,45 +1,34 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' // No version information printed +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_WIGTOBIGWIG { tag '$wig' label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::ucsc-wigtobigwig=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-wigtobigwig:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-wigtobigwig:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-wigtobigwig:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-wigtobigwig:377--h0b8a92a_2' }" input: path wig - path chromsizes + path sizes output: - path "*.bw" , emit: bw - path "versions.yml" , emit: versions + path "*.bw" , emit: bw + path "versions.yml", emit: versions script: - + def args = task.ext.args ?: '' """ wigToBigWig \\ - $options.args \\ + $args \\ $wig \\ - $chromsizes \\ + $sizes \\ ${wig.getSimpleName()}.bw cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ultra/pipeline/functions.nf b/modules/ultra/pipeline/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ultra/pipeline/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf index b61518e6..5df34121 100644 --- a/modules/ultra/pipeline/main.nf +++ b/modules/ultra/pipeline/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ULTRA_PIPELINE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0" - } else { - container "quay.io/biocontainers/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0' : + 'quay.io/biocontainers/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0' }" input: tuple val(meta), path(reads) @@ -28,21 +17,22 @@ process ULTRA_PIPELINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ uLTRA \\ pipeline \\ --t $task.cpus \\ --prefix $prefix \\ - $options.args \\ + $args \\ $genome \\ $gtf \\ $reads \\ ./ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( uLTRA --version|sed 's/uLTRA //g' ) + "${task.process}": + ultra: \$( uLTRA --version|sed 's/uLTRA //g' ) END_VERSIONS """ } diff --git a/modules/umitools/dedup/functions.nf b/modules/umitools/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/umitools/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index 0ec9741b..287bb8c2 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UMITOOLS_DEDUP { tag "$meta.id" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::umi_tools=1.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0" - } else { - container "quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0' : + 'quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,18 +15,19 @@ process UMITOOLS_DEDUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def paired = meta.single_end ? 
"" : "--paired" """ umi_tools dedup \\ -I $bam \\ -S ${prefix}.bam \\ $paired \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } diff --git a/modules/umitools/extract/functions.nf b/modules/umitools/extract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/umitools/extract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/umitools/extract/main.nf b/modules/umitools/extract/main.nf index d90a3ba8..3c2402e2 100644 --- a/modules/umitools/extract/main.nf +++ b/modules/umitools/extract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UMITOOLS_EXTRACT { tag "$meta.id" label "process_low" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::umi_tools=1.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0" - } else { - container "quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0' : + 'quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0' }" input: tuple val(meta), path(reads) @@ -27,19 +16,20 @@ process UMITOOLS_EXTRACT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ umi_tools \\ extract \\ -I $reads \\ -S ${prefix}.umi_extract.fastq.gz \\ - $options.args \\ + $args \\ > ${prefix}.umi_extract.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } else { @@ -50,12 +40,12 @@ process UMITOOLS_EXTRACT { --read2-in=${reads[1]} \\ -S ${prefix}.umi_extract_1.fastq.gz \\ --read2-out=${prefix}.umi_extract_2.fastq.gz \\ - $options.args \\ + $args \\ > ${prefix}.umi_extract.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } diff --git a/modules/unicycler/functions.nf b/modules/unicycler/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/unicycler/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 3629d730..14319dc1 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNICYCLER { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::unicycler=0.4.8' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/unicycler:0.4.8--py38h8162308_3" - } else { - container "quay.io/biocontainers/unicycler:0.4.8--py38h8162308_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/unicycler:0.4.8--py38h8162308_3' : + 'quay.io/biocontainers/unicycler:0.4.8--py38h8162308_3' }" input: tuple val(meta), path(shortreads), path(longreads) @@ -28,13 +17,14 @@ process UNICYCLER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def short_reads = shortreads ? ( meta.single_end ? "-s $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" ) : "" def long_reads = longreads ? 
"-l $longreads" : "" """ unicycler \\ --threads $task.cpus \\ - $options.args \\ + $args \\ $short_reads \\ $long_reads \\ --out ./ @@ -46,8 +36,8 @@ process UNICYCLER { mv unicycler.log ${prefix}.unicycler.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(unicycler --version 2>&1) | sed 's/^.*Unicycler v//; s/ .*\$//') + "${task.process}": + unicycler: \$(echo \$(unicycler --version 2>&1) | sed 's/^.*Unicycler v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/untar/functions.nf b/modules/untar/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/untar/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/untar/main.nf b/modules/untar/main.nf index efb9d825..6d1996e7 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNTAR { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: path archive @@ -26,16 +15,19 @@ process UNTAR { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' untar = archive.toString() - '.tar.gz' """ tar \\ -xzvf \\ - $options.args \\ - $archive + $args \\ + $archive \\ + $args2 \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') + "${task.process}": + untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/unzip/functions.nf b/modules/unzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/unzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf index f39e75e8..294ac0b0 100644 --- a/modules/unzip/main.nf +++ b/modules/unzip/main.nf @@ -1,23 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNZIP { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::p7zip=15.09" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/p7zip:15.09--h2d50403_4" - } else { - container "quay.io/biocontainers/p7zip:15.09--h2d50403_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/p7zip:15.09--h2d50403_4' : + 'quay.io/biocontainers/p7zip:15.09--h2d50403_4' }" input: path archive @@ -27,17 +15,17 @@ process UNZIP { path "versions.yml" , emit: versions script: - + def args = task.ext.args ?: '' if ( archive instanceof List && archive.name.size > 1 ) { exit 1, "[UNZIP] error: 7za only accepts a single archive as input. Please check module input." 
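    // With params.options and the per-module functions.nf gone, values such as args,
    // args2 and suffix are read from task.ext, which the pipeline is expected to set in
    // its configuration. A minimal, illustrative sketch (flag and suffix values below
    // are assumptions, not taken from this patch):
    //
    //   process {
    //       withName: 'UNZIP' {
    //           ext.args = ''                      // extra 7za flags, if any
    //       }
    //       withName: 'UMITOOLS_DEDUP' {
    //           ext.args   = '--umi-separator=":"'
    //           ext.suffix = '.dedup'              // output becomes <id>.dedup.bam
    //       }
    //   }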
} """ 7za \\ e \\ -o"${archive.baseName}"/ \\ - $options.args \\ + $args \\ $archive cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": 7za: \$(echo \$(7za --help) | sed 's/.*p7zip Version //; s/(.*//') END_VERSIONS """ diff --git a/modules/variantbam/functions.nf b/modules/variantbam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/variantbam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/variantbam/main.nf b/modules/variantbam/main.nf index e73b8bf1..3d354016 100644 --- a/modules/variantbam/main.nf +++ b/modules/variantbam/main.nf @@ -1,43 +1,33 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.4.4a' +def VERSION = '1.4.4a' // Version information not provided by tool on CLI process VARIANTBAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::variantbam=1.4.4a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/variantbam:1.4.4a--h7d7f7ad_5" - } else { - container "quay.io/biocontainers/variantbam:1.4.4a--h7d7f7ad_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/variantbam:1.4.4a--h7d7f7ad_5' : + 'quay.io/biocontainers/variantbam:1.4.4a--h7d7f7ad_5' }" input: tuple val(meta), path(bam) output: - tuple val(meta), path("*.bam") , emit: bam - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ variant \\ $bam \\ -o ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + variantbam: $VERSION END_VERSIONS """ } diff --git a/modules/vcftools/functions.nf b/modules/vcftools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/vcftools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index 768d5a23..62fff0cf 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process VCFTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::vcftools=0.1.16" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/vcftools:0.1.16--he513fc3_4" - } else { - container "quay.io/biocontainers/vcftools:0.1.16--he513fc3_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/vcftools:0.1.16--he513fc3_4' : + 'quay.io/biocontainers/vcftools:0.1.16--he513fc3_4' }" input: // Owing to the nature of vcftools we here provide solutions to working with optional bed files and optional @@ -93,22 +82,23 @@ process VCFTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args_list = args.tokenize() - def bed_arg = (options.args.contains('--bed')) ? "--bed ${bed}" : - (options.args.contains('--exclude-bed')) ? "--exclude-bed ${bed}" : - (options.args.contains('--hapcount')) ? "--hapcount ${bed}" : '' - args.removeIf { it.contains('--bed') } - args.removeIf { it.contains('--exclude-bed') } - args.removeIf { it.contains('--hapcount') } + def bed_arg = (args.contains('--bed')) ? "--bed ${bed}" : + (args.contains('--exclude-bed')) ? "--exclude-bed ${bed}" : + (args.contains('--hapcount')) ? "--hapcount ${bed}" : '' + args_list.removeIf { it.contains('--bed') } + args_list.removeIf { it.contains('--exclude-bed') } + args_list.removeIf { it.contains('--hapcount') } - def diff_variant_arg = (options.args.contains('--diff')) ? "--diff ${diff_variant_file}" : - (options.args.contains('--gzdiff')) ? "--gzdiff ${diff_variant_file}" : - (options.args.contains('--diff-bcf')) ? "--diff-bcf ${diff_variant_file}" : '' - args.removeIf { it.contains('--diff') } - args.removeIf { it.contains('--gzdiff') } - args.removeIf { it.contains('--diff-bcf') } + def diff_variant_arg = (args.contains('--diff')) ? "--diff ${diff_variant_file}" : + (args.contains('--gzdiff')) ? "--gzdiff ${diff_variant_file}" : + (args.contains('--diff-bcf')) ? 
"--diff-bcf ${diff_variant_file}" : '' + args_list.removeIf { it.contains('--diff') } + args_list.removeIf { it.contains('--gzdiff') } + args_list.removeIf { it.contains('--diff-bcf') } def input_file = ("$variant_file".endsWith(".vcf")) ? "--vcf ${variant_file}" : ("$variant_file".endsWith(".vcf.gz")) ? "--gzvcf ${variant_file}" : @@ -118,13 +108,13 @@ process VCFTOOLS { vcftools \\ $input_file \\ --out $prefix \\ - ${args.join(' ')} \\ + ${args_list.join(' ')} \\ $bed_arg \\ $diff_variant_arg cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(vcftools --version 2>&1) | sed 's/^.*VCFtools (//;s/).*//') + "${task.process}": + vcftools: \$(echo \$(vcftools --version 2>&1) | sed 's/^.*VCFtools (//;s/).*//') END_VERSIONS """ } diff --git a/modules/yara/index/functions.nf b/modules/yara/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/yara/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/yara/index/main.nf b/modules/yara/index/main.nf index 51ae8a32..77122c78 100644 --- a/modules/yara/index/main.nf +++ b/modules/yara/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process YARA_INDEX { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::yara=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/yara:1.0.2--2" - } else { - container "quay.io/biocontainers/yara:1.0.2--2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/yara:1.0.2--2' : + 'quay.io/biocontainers/yara:1.0.2--2' }" input: path fasta @@ -26,6 +15,7 @@ process YARA_INDEX { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ mkdir yara @@ -38,8 +28,8 @@ process YARA_INDEX { cp $fasta yara/yara.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_indexer --version 2>&1) | sed 's/^.*yara_indexer version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_indexer --version 2>&1) | sed 's/^.*yara_indexer version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/yara/mapper/functions.nf b/modules/yara/mapper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/yara/mapper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 3d69674c..6e7f433b 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process YARA_MAPPER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::yara=1.0.2 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0" - } else { - container "quay.io/biocontainers/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0' : + 'quay.io/biocontainers/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0' }" input: tuple val(meta), path(reads) @@ -27,27 +16,28 @@ process YARA_MAPPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ yara_mapper \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f bam \\ ${index}/yara \\ $reads | samtools view -@ $task.cpus -hb -F4 > ${prefix}.mapped.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } else { """ yara_mapper \\ - $options.args \\ - -t ${task.cpus} \\ + $args \\ + -t $task.cpus \\ -f bam \\ ${index}/yara \\ ${reads[0]} \\ @@ -57,8 +47,8 @@ process YARA_MAPPER { samtools view -@ $task.cpus -hF 4 -f 0x80 -b output.bam > ${prefix}_2.mapped.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/tests/config/nextflow.config b/tests/config/nextflow.config index cd22dde8..741edf5e 100644 --- a/tests/config/nextflow.config +++ b/tests/config/nextflow.config @@ -6,9 +6,9 @@ params { } process { - cpus = 2 - memory = 3.GB - time = 2.h + cpus = 2 + memory = 3.GB + time = 2.h } if ("$PROFILE" == "singularity") { @@ -28,5 +28,5 @@ conda { createTimeout = "120 min" } includeConfig 'test_data.config' manifest { - nextflowVersion = '!>=21.04.0' + nextflowVersion = '!>=21.10.3' } diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d68d64d0..aa59b7c9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -305,7 +305,7 @@ cooler/digest: cooler/cload: - modules/cooler/cload/** - tests/modules/cooler/cload/** - + cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** @@ -349,7 +349,7 @@ dastool/dastool: dastool/scaffolds2bin: - modules/dastool/scaffolds2bin/** - tests/modules/dastool/scaffolds2bin/** - + dedup: - modules/dedup/** - tests/modules/dedup/** @@ -490,7 +490,7 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** -gatk4/calculatecontamination: &gatk4_calculatecontamination +gatk4/calculatecontamination: #&gatk4_calculatecontamination - modules/gatk4/calculatecontamination/** - tests/modules/gatk4/calculatecontamination/** @@ -498,7 +498,7 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** -gatk4/createsomaticpanelofnormals: &gatk4_createsomaticpanelofnormals +gatk4/createsomaticpanelofnormals: #&gatk4_createsomaticpanelofnormals - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** @@ -510,11 +510,11 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** -gatk4/filtermutectcalls: &gatk4_filtermutectcalls +gatk4/filtermutectcalls: #&gatk4_filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** -gatk4/genomicsdbimport: &gatk4_genomicsdbimport +gatk4/genomicsdbimport: #&gatk4_genomicsdbimport - modules/gatk4/genomicsdbimport/** - 
tests/modules/gatk4/genomicsdbimport/** @@ -522,7 +522,7 @@ gatk4/genotypegvcfs: - modules/gatk4/genotypegvcfs/** - tests/modules/gatk4/genotypegvcfs/** -gatk4/getpileupsummaries: &gatk4_getpileupsummaries +gatk4/getpileupsummaries: #&gatk4_getpileupsummaries - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -538,7 +538,7 @@ gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** -gatk4/learnreadorientationmodel: &gatk4_learnreadorientationmodel +gatk4/learnreadorientationmodel: #&gatk4_learnreadorientationmodel - modules/gatk4/learnreadorientationmodel/** - tests/modules/gatk4/learnreadorientationmodel/** @@ -554,7 +554,7 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** -gatk4/mutect2: &gatk4_mutect2 +gatk4/mutect2: #&gatk4_mutect2 - modules/gatk4/mutect2/** - tests/modules/gatk4/mutect2/** @@ -746,13 +746,13 @@ kraken2/kraken2: - modules/untar/** - tests/modules/kraken2/kraken2/** -kronatools/kronadb: - - modules/kronatools/kronadb/** - - tests/modules/kronatools/kronadb/** +krona/kronadb: + - modules/krona/kronadb/** + - tests/modules/krona/kronadb/** -kronatools/ktimporttaxonomy: - - modules/kronatools/ktimporttaxonomy/** - - tests/modules/kronatools/ktimporttaxonomy/** +krona/ktimporttaxonomy: + - modules/krona/ktimporttaxonomy/** + - tests/modules/krona/ktimporttaxonomy/** last/dotplot: - modules/last/dotplot/** @@ -1063,6 +1063,10 @@ plink/vcf: - modules/plink/vcf/** - tests/modules/plink/vcf/** +plink2/vcf: + - modules/plink2/vcf/** + - tests/modules/plink2/vcf/** + pmdtools/filter: - modules/pmdtools/filter/** - tests/modules/pmdtools/filter/** @@ -1211,7 +1215,7 @@ samtools/idxstats: - modules/samtools/idxstats/** - tests/modules/samtools/idxstats/** -samtools/index: &samtools_index +samtools/index: #&samtools_index - modules/samtools/index/** - tests/modules/samtools/index/** @@ -1223,7 +1227,7 @@ samtools/mpileup: - modules/samtools/mpileup/** - tests/modules/samtools/mpileup/** -samtools/sort: &samtools_sort +samtools/sort: #&samtools_sort - modules/samtools/sort/** - tests/modules/samtools/sort/** @@ -1246,7 +1250,7 @@ seacr/callpeak: seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** - + seqsero2: - modules/seqsero2/** - tests/modules/seqsero2/** @@ -1299,11 +1303,11 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/fasterqdump: &sratools_fasterqdump +sratools/fasterqdump: #&sratools_fasterqdump - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** -sratools/prefetch: &sratools_prefetch +sratools/prefetch: #&sratools_prefetch - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** @@ -1423,47 +1427,47 @@ yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** -subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools - - subworkflows/nf-core/bam_stats_samtools/** - - tests/subworkflows/nf-core/bam_stats_samtools/** +# subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools +# - subworkflows/nf-core/bam_stats_samtools/** +# - tests/subworkflows/nf-core/bam_stats_samtools/** -subworkflows/bam_sort_samtools: &subworkflows_bam_sort_samtools - - subworkflows/nf-core/bam_sort_samtools/** - - tests/subworkflows/nf-core/bam_sort_samtools/** - - *samtools_sort - - *samtools_index - - *subworkflows_bam_stats_samtools +# subworkflows/bam_sort_samtools: &subworkflows_bam_sort_samtools +# - subworkflows/nf-core/bam_sort_samtools/** +# - 
tests/subworkflows/nf-core/bam_sort_samtools/** +# - *samtools_sort +# - *samtools_index +# - *subworkflows_bam_stats_samtools -subworkflows/align_bowtie2: - - subworkflows/nf-core/align_bowtie2/** - - tests/subworkflows/nf-core/align_bowtie2/** - - *subworkflows_bam_sort_samtools +# subworkflows/align_bowtie2: +# - subworkflows/nf-core/align_bowtie2/** +# - tests/subworkflows/nf-core/align_bowtie2/** +# - *subworkflows_bam_sort_samtools -subworkflows/sra_fastq: - - subworkflows/nf-core/sra_fastq/** - - tests/subworkflows/nf-core/sra_fastq/** - - *sratools_fasterqdump - - *sratools_prefetch +# subworkflows/sra_fastq: +# - subworkflows/nf-core/sra_fastq/** +# - tests/subworkflows/nf-core/sra_fastq/** +# - *sratools_fasterqdump +# - *sratools_prefetch -subworkflows/gatk_create_som_pon: - - subworkflows/nf-core/gatk_create_som_pon/** - - tests/subworkflows/nf-core/gatk_create_som_pon/** - - *gatk4_genomicsdbimport - - *gatk4_createsomaticpanelofnormals +# subworkflows/gatk_create_som_pon: +# - subworkflows/nf-core/gatk_create_som_pon/** +# - tests/subworkflows/nf-core/gatk_create_som_pon/** +# - *gatk4_genomicsdbimport +# - *gatk4_createsomaticpanelofnormals -subworkflows/gatk_tumor_normal_somatic_variant_calling: - - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** - - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** - - *gatk4_mutect2 - - *gatk4_learnreadorientationmodel - - *gatk4_getpileupsummaries - - *gatk4_calculatecontamination - - *gatk4_filtermutectcalls - -subworkflows/gatk_tumor_only_somatic_variant_calling: - - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - - *gatk4_mutect2 - - *gatk4_getpileupsummaries - - *gatk4_calculatecontamination - - *gatk4_filtermutectcalls +# subworkflows/gatk_tumor_normal_somatic_variant_calling: +# - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** +# - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** +# - *gatk4_mutect2 +# - *gatk4_learnreadorientationmodel +# - *gatk4_getpileupsummaries +# - *gatk4_calculatecontamination +# - *gatk4_filtermutectcalls + +# subworkflows/gatk_tumor_only_somatic_variant_calling: +# - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** +# - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** +# - *gatk4_mutect2 +# - *gatk4_getpileupsummaries +# - *gatk4_calculatecontamination +# - *gatk4_filtermutectcalls diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 0c7ce2fc..31e17618 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -221,8 +221,8 @@ params { test_narrowpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/narrowpeak/test.narrowPeak" test2_narrowpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/narrowpeak/test2.narrowPeak" - test_10x_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_1.fastq.gz" - test_10x_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_2.fastq.gz" + test_10x_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test_10x_S1_L001_R1_001.fastq.gz" + test_10x_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test_10x_S1_L001_R2_001.fastq.gz" test_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test.yak" test2_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test2.yak" @@ -252,6 +252,56 @@ params { 
filelist = "${test_data_dir}/genomics/homo_sapiens/pacbio/txt/filelist.txt" } } + 'bacteroides_fragilis' { + 'genome' { + genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.fna.gz" + genome_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.paf" + } + 'illumina' { + test1_contigs_fa_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" + test1_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test1_1.fastq.gz" + test1_2_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" + test2_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" + test2_2_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" + test1_paired_end_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.bam" + test1_paired_end_sorted_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.sorted.bam" + test1_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.sorted.bam.bai" + test2_paired_end_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.bam" + test2_paired_end_sorted_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.sorted.bam" + test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.sorted.bam.bai" + } + 'nanopore' { + test_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" + overlap_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/nanopore/overlap.paf" + } + } + 'candidatus_portiera_aleyrodidarum' { + 'genome' { + genome_fasta = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.fasta" + genome_sizes = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.sizes" + genome_aln_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.aln.gz" + genome_aln_nwk = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.aln.nwk" + proteome_fasta = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/proteome.fasta" + test1_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test1.gff" + test2_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test2.gff" + test3_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test3.gff" + } + 'illumina' { + test_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fasta/test_1.fastq.gz" + test_2_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fastq/test_2.fastq.gz" + test_se_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fastq/test_se.fastq.gz" + } + 'nanopore' { + test_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/nanopore/fastq/test.fastq.gz" + } + } + 'haemophilus_influenzae' { + 'genome' { + genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.fna.gz" + genome_aln_gz = 
"${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.aln.gz" + genome_aln_nwk = "${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.aln.nwk" + } + } 'generic' { 'csv' { test_csv = "${test_data_dir}/generic/csv/test.csv" @@ -282,28 +332,5 @@ params { } } - 'bacteroides_fragilis'{ - 'genome' { - genome_fna_gz = "${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.fna.gz" - genome_paf = "${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.paf" - } - 'illumina' { - test1_contigs_fa_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" - test1_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_1.fastq.gz" - test1_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" - test2_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" - test2_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" - test1_paired_end_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.bam" - test1_paired_end_sorted_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.sorted.bam" - test1_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.sorted.bam.bai" - test2_paired_end_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.bam" - test2_paired_end_sorted_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.sorted.bam" - test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.sorted.bam.bai" - } - 'nanopore' { - test_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" - overlap_paf = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/overlap.paf" - } - } } } diff --git a/tests/modules/abacas/main.nf b/tests/modules/abacas/main.nf index dc58ed61..542a67af 100644 --- a/tests/modules/abacas/main.nf +++ b/tests/modules/abacas/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ABACAS } from '../../../modules/abacas/main.nf' addParams ( options: ['args' : '-m -p nucmer'] ) +include { ABACAS } from '../../../modules/abacas/main.nf' workflow test_abacas { diff --git a/tests/modules/abacas/nextflow.config b/tests/modules/abacas/nextflow.config new file mode 100644 index 00000000..17296503 --- /dev/null +++ b/tests/modules/abacas/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ABACAS { + ext.args = '-m -p nucmer' + } + +} diff --git a/tests/modules/abacas/test.yml b/tests/modules/abacas/test.yml index 899bc4db..c466a6ed 100644 --- a/tests/modules/abacas/test.yml +++ b/tests/modules/abacas/test.yml @@ -1,5 +1,5 @@ - name: abacas - command: nextflow run ./tests/modules/abacas -entry test_abacas -c tests/config/nextflow.config + command: nextflow run ./tests/modules/abacas -entry test_abacas -c ./tests/config/nextflow.config -c ./tests/modules/abacas/nextflow.config tags: - abacas files: diff --git a/tests/modules/adapterremoval/main.nf b/tests/modules/adapterremoval/main.nf index 9dd37aa9..ee7f1c44 100644 --- a/tests/modules/adapterremoval/main.nf +++ b/tests/modules/adapterremoval/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ADAPTERREMOVAL } from '../../../modules/adapterremoval/main.nf' addParams( options: [:] ) +include { ADAPTERREMOVAL } from 
'../../../modules/adapterremoval/main.nf' workflow test_adapterremoval_single_end { input = [ [ id:'test', single_end:true, collapse:false ], // meta map diff --git a/tests/modules/adapterremoval/nextflow.config b/tests/modules/adapterremoval/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/adapterremoval/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/adapterremoval/test.yml b/tests/modules/adapterremoval/test.yml index 318e7866..a6c4a6cf 100644 --- a/tests/modules/adapterremoval/test.yml +++ b/tests/modules/adapterremoval/test.yml @@ -1,5 +1,5 @@ - name: adapterremoval test_adapterremoval_single_end - command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_single_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: @@ -9,7 +9,7 @@ md5sum: 62139afee94defad5b83bdd0b8475a1f - name: adapterremoval test_adapterremoval_paired_end - command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: @@ -21,7 +21,7 @@ md5sum: de7b38e2c881bced8671acb1ab452d78 - name: adapterremoval test_adapterremoval_paired_end_collapse - command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: diff --git a/tests/modules/agrvate/main.nf b/tests/modules/agrvate/main.nf index 58058fe3..ac682bef 100644 --- a/tests/modules/agrvate/main.nf +++ b/tests/modules/agrvate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { AGRVATE } from '../../../modules/agrvate/main.nf' addParams( options: ["args": "--mummer"] ) +include { AGRVATE } from '../../../modules/agrvate/main.nf' workflow test_agrvate { diff --git a/tests/modules/agrvate/nextflow.config b/tests/modules/agrvate/nextflow.config new file mode 100644 index 00000000..7f127e5e --- /dev/null +++ b/tests/modules/agrvate/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: AGRVATE { + ext.args = '--mummer' + } + +} diff --git a/tests/modules/agrvate/test.yml b/tests/modules/agrvate/test.yml index ec413663..36e8886c 100644 --- a/tests/modules/agrvate/test.yml +++ b/tests/modules/agrvate/test.yml @@ -1,5 +1,5 @@ - name: agrvate - command: nextflow run ./tests/modules/agrvate -entry test_agrvate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/agrvate -entry test_agrvate -c ./tests/config/nextflow.config -c ./tests/modules/agrvate/nextflow.config tags: - agrvate files: diff --git a/tests/modules/allelecounter/main.nf b/tests/modules/allelecounter/main.nf index b938ab94..3fe11be3 100644 --- a/tests/modules/allelecounter/main.nf +++ b/tests/modules/allelecounter/main.nf @@ -1,7 +1,7 @@ #!/usr/bin/env nextflow 
nextflow.enable.dsl = 2 -include { ALLELECOUNTER } from '../../../modules/allelecounter/main.nf' addParams( options: [:] ) +include { ALLELECOUNTER } from '../../../modules/allelecounter/main.nf' workflow test_allelecounter_bam { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/allelecounter/nextflow.config b/tests/modules/allelecounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/allelecounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/allelecounter/test.yml b/tests/modules/allelecounter/test.yml index bbef0ecc..a0afbc12 100644 --- a/tests/modules/allelecounter/test.yml +++ b/tests/modules/allelecounter/test.yml @@ -1,5 +1,5 @@ - name: allelecounter test_allelecounter_bam - command: nextflow run tests/modules/allelecounter -entry test_allelecounter_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/allelecounter -entry test_allelecounter_bam -c ./tests/config/nextflow.config -c ./tests/modules/allelecounter/nextflow.config tags: - allelecounter files: @@ -7,7 +7,7 @@ md5sum: 2bbe9d7331b78bdac30fe30dbc5fdaf3 - name: allelecounter test_allelecounter_cram - command: nextflow run tests/modules/allelecounter -entry test_allelecounter_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/allelecounter -entry test_allelecounter_cram -c ./tests/config/nextflow.config -c ./tests/modules/allelecounter/nextflow.config tags: - allelecounter files: diff --git a/tests/modules/amps/main.nf b/tests/modules/amps/main.nf index 7d7a40d1..15572096 100644 --- a/tests/modules/amps/main.nf +++ b/tests/modules/amps/main.nf @@ -2,12 +2,12 @@ nextflow.enable.dsl = 2 -include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } from '../../../modules/malt/run/main.nf' addParams( options: [:] ) -include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' addParams( options: [args: "-f def_anc"] ) -include { AMPS } from '../../../modules/amps/main.nf' addParams( options: [:] ) +include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' +include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../modules/malt/run/main.nf' +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' +include { AMPS } from '../../../modules/amps/main.nf' workflow test_amps { diff --git a/tests/modules/amps/nextflow.config b/tests/modules/amps/nextflow.config new file mode 100644 index 00000000..b58ac3fe --- /dev/null +++ b/tests/modules/amps/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MALTEXTRACT { + ext.args = '-f def_anc' + } + +} diff --git a/tests/modules/amps/test.yml b/tests/modules/amps/test.yml index 04691f18..f38320e4 100644 --- a/tests/modules/amps/test.yml +++ b/tests/modules/amps/test.yml @@ -1,5 +1,5 @@ - name: amps - command: nextflow run ./tests/modules/amps -entry test_amps -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/amps -entry test_amps -c ./tests/config/nextflow.config -c ./tests/modules/amps/nextflow.config tags: - amps files: diff --git a/tests/modules/arriba/main.nf b/tests/modules/arriba/main.nf index 833742d6..60741275 100644 --- a/tests/modules/arriba/main.nf +++ b/tests/modules/arriba/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 11'] ) -include { STAR_ALIGN } from '../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'] ) -include { ARRIBA } from '../../../modules/arriba/main.nf' addParams( options: [:] ) +include { STAR_GENOMEGENERATE } from '../../../modules/star/genomegenerate/main.nf' +include { STAR_ALIGN } from '../../../modules/star/align/main.nf' +include { ARRIBA } from '../../../modules/arriba/main.nf' workflow test_arriba_single_end { @@ -14,9 +14,12 @@ workflow test_arriba_single_end { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) ARRIBA ( STAR_ALIGN.out.bam, fasta, gtf ) } @@ -29,8 +32,11 @@ workflow test_arriba_paired_end { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) ARRIBA ( STAR_ALIGN.out.bam, fasta, gtf ) } diff --git a/tests/modules/arriba/nextflow.config b/tests/modules/arriba/nextflow.config new file mode 100644 index 00000000..1b66d8df --- /dev/null +++ b/tests/modules/arriba/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAR_GENOMEGENERATE { + ext.args = '--genomeSAindexNbases 11' + } + + withName: STAR_ALIGN { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50' + } + +} diff --git 
a/tests/modules/arriba/test.yml b/tests/modules/arriba/test.yml index c1dc7c1e..52743167 100644 --- a/tests/modules/arriba/test.yml +++ b/tests/modules/arriba/test.yml @@ -1,5 +1,5 @@ - name: arriba test_arriba_single_end - command: nextflow run tests/modules/arriba -entry test_arriba_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/arriba -entry test_arriba_single_end -c ./tests/config/nextflow.config -c ./tests/modules/arriba/nextflow.config tags: - arriba files: @@ -7,46 +7,45 @@ md5sum: cad8c215b938d1e45b747a5b7898a4c2 - path: output/arriba/test.fusions.tsv md5sum: 7c3383f7eb6d79b84b0bd30a7ef02d70 - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 9f085c626553b1c52f2827421972ac10 - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 9e42067b1ec70b773257529230dd7b3a - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: 29c99195dcc79ff4df1f754ff16aac78 - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out - path: output/star/test.SJ.out.tab - name: arriba test_arriba_paired_end - command: nextflow run tests/modules/arriba -entry test_arriba_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/arriba -entry test_arriba_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/arriba/nextflow.config tags: - arriba files: @@ -54,39 +53,38 @@ md5sum: 85e36c887464e4deaa65f45174d3b8fd - path: output/arriba/test.fusions.tsv md5sum: 7c3383f7eb6d79b84b0bd30a7ef02d70 - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 
8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 9f085c626553b1c52f2827421972ac10 - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 9e42067b1ec70b773257529230dd7b3a - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: d724ca90a102347b9c5052a33ea4d308 - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out diff --git a/tests/modules/artic/guppyplex/main.nf b/tests/modules/artic/guppyplex/main.nf index 972a6e66..89f67c74 100644 --- a/tests/modules/artic/guppyplex/main.nf +++ b/tests/modules/artic/guppyplex/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ARTIC_GUPPYPLEX } from '../../../../modules/artic/guppyplex/main.nf' addParams( options: [:] ) +include { ARTIC_GUPPYPLEX } from '../../../../modules/artic/guppyplex/main.nf' process STAGE_FASTQ_DIR { input: diff --git a/tests/modules/artic/guppyplex/nextflow.config b/tests/modules/artic/guppyplex/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/artic/guppyplex/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/artic/guppyplex/test.yml b/tests/modules/artic/guppyplex/test.yml index 133f0b15..6fd10898 100644 --- a/tests/modules/artic/guppyplex/test.yml +++ b/tests/modules/artic/guppyplex/test.yml @@ -1,5 +1,5 @@ - name: artic guppyplex - command: nextflow run tests/modules/artic/guppyplex -entry test_artic_guppyplex -c tests/config/nextflow.config + command: nextflow run ./tests/modules/artic/guppyplex -entry test_artic_guppyplex -c ./tests/config/nextflow.config -c ./tests/modules/artic/guppyplex/nextflow.config tags: - artic - artic/guppyplex diff --git a/tests/modules/artic/minion/main.nf b/tests/modules/artic/minion/main.nf index f4993289..ca66ede0 100644 --- a/tests/modules/artic/minion/main.nf +++ b/tests/modules/artic/minion/main.nf @@ -3,17 +3,19 @@ nextflow.enable.dsl = 2 include { UNTAR } from 
'../../../../modules/untar/main.nf' -include { ARTIC_MINION } from '../../../../modules/artic/minion/main.nf' addParams( fast5_dir: true, sequencing_summary: true, artic_minion_medaka_model:false ) +include { ARTIC_MINION } from '../../../../modules/artic/minion/main.nf' workflow test_artic_minion { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] - fast5_tar = [ file(params.test_data['sarscov2']['nanopore']['fast5_tar_gz'], checkIfExists: true) ] - sequencing_summary = [ file(params.test_data['sarscov2']['nanopore']['test_sequencing_summary'], checkIfExists: true) ] - fasta = [ file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta', checkIfExists: true) ] - bed = [ file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed', checkIfExists: true) ] - dummy_file = [ ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) + ] + fast5_tar = file(params.test_data['sarscov2']['nanopore']['fast5_tar_gz'], checkIfExists: true) + sequencing_summary = file(params.test_data['sarscov2']['nanopore']['test_sequencing_summary'], checkIfExists: true) + fasta = file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta', checkIfExists: true) + bed = file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed', checkIfExists: true) + dummy_file = [] fast5_dir = UNTAR ( fast5_tar ).untar diff --git a/tests/modules/artic/minion/nextflow.config b/tests/modules/artic/minion/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/artic/minion/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/artic/minion/test.yml b/tests/modules/artic/minion/test.yml index b3c5f0f1..8b36b224 100644 --- a/tests/modules/artic/minion/test.yml +++ b/tests/modules/artic/minion/test.yml @@ -1,5 +1,5 @@ - name: artic minion - command: nextflow run tests/modules/artic/minion -entry test_artic_minion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/artic/minion -entry test_artic_minion -c ./tests/config/nextflow.config -c ./tests/modules/artic/minion/nextflow.config tags: - artic - artic/minion diff --git a/tests/modules/assemblyscan/main.nf b/tests/modules/assemblyscan/main.nf index 6f3cbb5e..7cd5f393 100644 --- a/tests/modules/assemblyscan/main.nf +++ b/tests/modules/assemblyscan/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ASSEMBLYSCAN } from '../../../modules/assemblyscan/main.nf' addParams( options: [:] ) +include { ASSEMBLYSCAN } from '../../../modules/assemblyscan/main.nf' workflow test_assemblyscan { diff --git a/tests/modules/assemblyscan/nextflow.config b/tests/modules/assemblyscan/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/assemblyscan/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/assemblyscan/test.yml b/tests/modules/assemblyscan/test.yml index 0eb4ad66..4a3ba5ec 100644 --- a/tests/modules/assemblyscan/test.yml +++ b/tests/modules/assemblyscan/test.yml @@ -1,5 +1,5 @@ - name: 
assemblyscan test_assemblyscan - command: nextflow run tests/modules/assemblyscan -entry test_assemblyscan -c tests/config/nextflow.config + command: nextflow run ./tests/modules/assemblyscan -entry test_assemblyscan -c ./tests/config/nextflow.config -c ./tests/modules/assemblyscan/nextflow.config tags: - assemblyscan files: diff --git a/tests/modules/ataqv/ataqv/main.nf b/tests/modules/ataqv/ataqv/main.nf index 2f2a62eb..b1103350 100644 --- a/tests/modules/ataqv/ataqv/main.nf +++ b/tests/modules/ataqv/ataqv/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { ATAQV_ATAQV } from '../../../../modules/ataqv/ataqv/main.nf' addParams( options: [:] ) -include { ATAQV_ATAQV as ATAQV_ATAQV_PROBLEM_READS} from '../../../../modules/ataqv/ataqv/main.nf' addParams( options: ['args': '--log-problematic-reads'] ) +include { ATAQV_ATAQV } from '../../../../modules/ataqv/ataqv/main.nf' +include { ATAQV_ATAQV as ATAQV_ATAQV_PROBLEM_READS} from '../../../../modules/ataqv/ataqv/main.nf' workflow test_ataqv_ataqv { diff --git a/tests/modules/ataqv/ataqv/nextflow.config b/tests/modules/ataqv/ataqv/nextflow.config new file mode 100644 index 00000000..31700510 --- /dev/null +++ b/tests/modules/ataqv/ataqv/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ATAQV_ATAQV_PROBLEM_READS { + ext.args = '--log-problematic-reads' + } + +} diff --git a/tests/modules/ataqv/ataqv/test.yml b/tests/modules/ataqv/ataqv/test.yml index 77452f6f..f9f2a888 100644 --- a/tests/modules/ataqv/ataqv/test.yml +++ b/tests/modules/ataqv/ataqv/test.yml @@ -1,5 +1,5 @@ - name: ataqv ataqv test_ataqv_ataqv - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv @@ -9,7 +9,7 @@ - '"forward_mate_reads": 101' - name: ataqv ataqv test_ataqv_ataqv_problem_reads - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_problem_reads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_problem_reads -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv @@ -21,7 +21,7 @@ - '"forward_mate_reads": 101' - name: ataqv ataqv test_ataqv_ataqv_peak - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_peak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_peak -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv @@ -31,7 +31,7 @@ - '"forward_mate_reads": 101' - name: ataqv ataqv test_ataqv_ataqv_tss - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_tss -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_tss -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv @@ -41,7 +41,7 @@ - '"forward_mate_reads": 101' - name: ataqv ataqv test_ataqv_ataqv_excluded_regs - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_excluded_regs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_excluded_regs -c ./tests/config/nextflow.config -c 
./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv diff --git a/tests/modules/bakta/main.nf b/tests/modules/bakta/main.nf index 531099f1..1bc00622 100644 --- a/tests/modules/bakta/main.nf +++ b/tests/modules/bakta/main.nf @@ -2,12 +2,14 @@ nextflow.enable.dsl = 2 -include { BAKTA } from '../../../modules/bakta/main.nf' addParams( options: [:] ) +include { BAKTA } from '../../../modules/bakta/main.nf' workflow test_bakta { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] BAKTA ( input, [], [], [] ) } diff --git a/tests/modules/bakta/nextflow.config b/tests/modules/bakta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bakta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bamaligncleaner/main.nf b/tests/modules/bamaligncleaner/main.nf index 94ee005f..c9d517ae 100644 --- a/tests/modules/bamaligncleaner/main.nf +++ b/tests/modules/bamaligncleaner/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BAMALIGNCLEANER } from '../../../modules/bamaligncleaner/main.nf' addParams( options: [:] ) +include { BAMALIGNCLEANER } from '../../../modules/bamaligncleaner/main.nf' workflow test_bamaligncleaner { diff --git a/tests/modules/bamaligncleaner/nextflow.config b/tests/modules/bamaligncleaner/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bamaligncleaner/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bamaligncleaner/test.yml b/tests/modules/bamaligncleaner/test.yml index 568925b0..4207b8c2 100644 --- a/tests/modules/bamaligncleaner/test.yml +++ b/tests/modules/bamaligncleaner/test.yml @@ -1,5 +1,5 @@ - name: bamaligncleaner - command: nextflow run ./tests/modules/bamaligncleaner -entry test_bamaligncleaner -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamaligncleaner -entry test_bamaligncleaner -c ./tests/config/nextflow.config -c ./tests/modules/bamaligncleaner/nextflow.config tags: - bamaligncleaner files: diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index 5538c86f..eb0bed01 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BAMTOOLS_SPLIT } from '../../../../modules/bamtools/split/main.nf' addParams( options: [args:"-reference"] ) +include { BAMTOOLS_SPLIT } from '../../../../modules/bamtools/split/main.nf' workflow test_bamtools_split { diff --git a/tests/modules/bamtools/split/nextflow.config b/tests/modules/bamtools/split/nextflow.config new file mode 100644 index 00000000..e7de5477 --- /dev/null +++ b/tests/modules/bamtools/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BAMTOOLS_SPLIT { + ext.args = '-reference' + } + +} diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml index f28a9bcf..4f52e9ce 100644 --- a/tests/modules/bamtools/split/test.yml +++ 
b/tests/modules/bamtools/split/test.yml @@ -1,5 +1,5 @@ - name: bamtools split test_bamtools_split - command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamtools/split -entry test_bamtools_split -c ./tests/config/nextflow.config -c ./tests/modules/bamtools/split/nextflow.config tags: - bamtools/split - bamtools diff --git a/tests/modules/bamutil/trimbam/main.nf b/tests/modules/bamutil/trimbam/main.nf index 3699756c..2967b038 100644 --- a/tests/modules/bamutil/trimbam/main.nf +++ b/tests/modules/bamutil/trimbam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BAMUTIL_TRIMBAM } from '../../../../modules/bamutil/trimbam/main.nf' addParams( options: [:] ) +include { BAMUTIL_TRIMBAM } from '../../../../modules/bamutil/trimbam/main.nf' workflow test_bamutil_trimbam { diff --git a/tests/modules/bamutil/trimbam/nextflow.config b/tests/modules/bamutil/trimbam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bamutil/trimbam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bamutil/trimbam/test.yml b/tests/modules/bamutil/trimbam/test.yml index 95ddc3b3..443a4ded 100644 --- a/tests/modules/bamutil/trimbam/test.yml +++ b/tests/modules/bamutil/trimbam/test.yml @@ -1,5 +1,5 @@ - name: bamutil trimbam test_bamutil_trimbam - command: nextflow run tests/modules/bamutil/trimbam -entry test_bamutil_trimbam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamutil/trimbam -entry test_bamutil_trimbam -c ./tests/config/nextflow.config -c ./tests/modules/bamutil/trimbam/nextflow.config tags: - bamutil/trimbam - bamutil diff --git a/tests/modules/bandage/image/main.nf b/tests/modules/bandage/image/main.nf index 524066b0..15f01ab1 100644 --- a/tests/modules/bandage/image/main.nf +++ b/tests/modules/bandage/image/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BANDAGE_IMAGE } from '../../../../modules/bandage/image/main.nf' addParams( options: [:] ) +include { BANDAGE_IMAGE } from '../../../../modules/bandage/image/main.nf' workflow test_bandage_image { input = [ diff --git a/tests/modules/bandage/image/nextflow.config b/tests/modules/bandage/image/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bandage/image/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bandage/image/test.yml b/tests/modules/bandage/image/test.yml index 437eca05..2abdd175 100644 --- a/tests/modules/bandage/image/test.yml +++ b/tests/modules/bandage/image/test.yml @@ -1,5 +1,5 @@ - name: bandage image - command: nextflow run ./tests/modules/bandage/image -entry test_bandage_image -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bandage/image -entry test_bandage_image -c ./tests/config/nextflow.config -c ./tests/modules/bandage/image/nextflow.config tags: - bandage - bandage/image diff --git a/tests/modules/bbmap/align/main.nf b/tests/modules/bbmap/align/main.nf index c3bf43ba..c7a02e2a 100644 --- a/tests/modules/bbmap/align/main.nf +++ b/tests/modules/bbmap/align/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams( options: [:] ) -include { BBMAP_ALIGN } from 
'../../../../modules/bbmap/align/main.nf' addParams( options: [:] ) -include { BBMAP_ALIGN as BBMAP_ALIGN_PIGZ } from '../../../../modules/bbmap/align/main.nf' addParams( options: [args: "unpigz=t" ] ) +include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' +include { BBMAP_ALIGN } from '../../../../modules/bbmap/align/main.nf' +include { BBMAP_ALIGN as BBMAP_ALIGN_PIGZ } from '../../../../modules/bbmap/align/main.nf' workflow test_bbmap_align_paired_end_fasta_ref { diff --git a/tests/modules/bbmap/align/nextflow.config b/tests/modules/bbmap/align/nextflow.config new file mode 100644 index 00000000..fe0afd72 --- /dev/null +++ b/tests/modules/bbmap/align/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BBMAP_ALIGN_PIGZ { + ext.args = 'unpigz=t' + } + +} diff --git a/tests/modules/bbmap/align/test.yml b/tests/modules/bbmap/align/test.yml index a30713c9..d9f9a862 100644 --- a/tests/modules/bbmap/align/test.yml +++ b/tests/modules/bbmap/align/test.yml @@ -1,5 +1,5 @@ - name: bbmap align paired end fasta ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_fasta_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_fasta_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align @@ -9,7 +9,7 @@ - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align @@ -19,7 +19,7 @@ - path: output/bbmap/test.bbmap.log - name: bbmap align single end index ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align @@ -29,7 +29,7 @@ - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref pigz - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align diff --git a/tests/modules/bbmap/bbduk/main.nf b/tests/modules/bbmap/bbduk/main.nf index 911ca391..e1f0c2de 100644 --- a/tests/modules/bbmap/bbduk/main.nf +++ b/tests/modules/bbmap/bbduk/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BBMAP_BBDUK } from '../../../../modules/bbmap/bbduk/main.nf' addParams( options: [ 'args' : 'trimq=10 qtrim=r', 'suffix' : '.trim' ] ) +include { BBMAP_BBDUK } from '../../../../modules/bbmap/bbduk/main.nf' workflow test_bbmap_bbduk_single_end { diff --git a/tests/modules/bbmap/bbduk/nextflow.config b/tests/modules/bbmap/bbduk/nextflow.config new file mode 100644 index 00000000..8940a9be --- /dev/null +++ b/tests/modules/bbmap/bbduk/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BBMAP_BBDUK { + ext.args = 'trimq=10 qtrim=r' + ext.suffix = '.trim' + } + +} diff --git a/tests/modules/bbmap/bbduk/test.yml b/tests/modules/bbmap/bbduk/test.yml index 4d2b8604..7ab5b963 100644 --- a/tests/modules/bbmap/bbduk/test.yml +++ b/tests/modules/bbmap/bbduk/test.yml @@ -1,5 +1,5 @@ - name: bbmap bbduk test_bbmap_bbduk_single_end - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -10,7 +10,7 @@ md5sum: a87d0cbd5ced7df8bf1751e4cb407482 - name: bbmap bbduk test_bbmap_bbduk_paired_end - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -23,7 +23,7 @@ md5sum: 406e068fbe198f02b48e7e210cc0c69f - name: bbmap bbduk test_bbmap_bbduk_se_ref - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_se_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_se_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -34,7 +34,7 @@ md5sum: 3970e82605c7d109bb348fc94e9eecc0 - name: bbmap bbduk test_bbmap_bbduk_pe_ref - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_pe_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_pe_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: diff --git a/tests/modules/bbmap/bbsplit/main.nf b/tests/modules/bbmap/bbsplit/main.nf index 1d3c30c1..d1236061 100644 --- a/tests/modules/bbmap/bbsplit/main.nf +++ b/tests/modules/bbmap/bbsplit/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_INDEX } from '../../../../modules/bbmap/bbsplit/main.nf' addParams( options: [:] ) -include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_SPLIT } from '../../../../modules/bbmap/bbsplit/main.nf' addParams( options: [:] ) +include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_INDEX } from '../../../../modules/bbmap/bbsplit/main.nf' +include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_SPLIT } from '../../../../modules/bbmap/bbsplit/main.nf' workflow test_bbmap_bbsplit { diff --git a/tests/modules/bbmap/bbsplit/nextflow.config b/tests/modules/bbmap/bbsplit/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bbmap/bbsplit/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bbmap/bbsplit/test.yml b/tests/modules/bbmap/bbsplit/test.yml index 87bdebea..add9b519 100644 --- a/tests/modules/bbmap/bbsplit/test.yml +++ b/tests/modules/bbmap/bbsplit/test.yml @@ -1,5 +1,5 @@ - name: bbmap bbsplit test_bbmap_bbsplit - command: nextflow run tests/modules/bbmap/bbsplit -entry test_bbmap_bbsplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbsplit -entry test_bbmap_bbsplit -c ./tests/config/nextflow.config -c 
./tests/modules/bbmap/bbsplit/nextflow.config tags: - bbmap/bbsplit - bbmap diff --git a/tests/modules/bbmap/index/main.nf b/tests/modules/bbmap/index/main.nf index 0d912615..a6f111f4 100644 --- a/tests/modules/bbmap/index/main.nf +++ b/tests/modules/bbmap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams( options: [:] ) +include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' workflow test_bbmap_index { diff --git a/tests/modules/bbmap/index/nextflow.config b/tests/modules/bbmap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bbmap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bbmap/index/test.yml b/tests/modules/bbmap/index/test.yml index 32684ad4..4e8d7196 100644 --- a/tests/modules/bbmap/index/test.yml +++ b/tests/modules/bbmap/index/test.yml @@ -1,7 +1,5 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml bbmap/index - name: bbmap index - command: nextflow run ./tests/modules/bbmap/index -entry test_bbmap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/index -entry test_bbmap_index -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/index/nextflow.config tags: - bbmap - bbmap/index diff --git a/tests/modules/bcftools/concat/main.nf b/tests/modules/bcftools/concat/main.nf index 8869a3d7..8441d488 100644 --- a/tests/modules/bcftools/concat/main.nf +++ b/tests/modules/bcftools/concat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_CONCAT } from '../../../../modules/bcftools/concat/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_CONCAT } from '../../../../modules/bcftools/concat/main.nf' workflow test_bcftools_concat { diff --git a/tests/modules/bcftools/concat/nextflow.config b/tests/modules/bcftools/concat/nextflow.config new file mode 100644 index 00000000..3f0d064a --- /dev/null +++ b/tests/modules/bcftools/concat/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_CONCAT { + ext.args = '--no-version' + } + +} diff --git a/tests/modules/bcftools/concat/test.yml b/tests/modules/bcftools/concat/test.yml index 413fe798..fee6158f 100644 --- a/tests/modules/bcftools/concat/test.yml +++ b/tests/modules/bcftools/concat/test.yml @@ -1,5 +1,5 @@ - name: bcftools concat test_bcftools_concat - command: nextflow run tests/modules/bcftools/concat -entry test_bcftools_concat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/concat -entry test_bcftools_concat -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/concat/nextflow.config tags: - bcftools/concat - bcftools diff --git a/tests/modules/bcftools/consensus/main.nf b/tests/modules/bcftools/consensus/main.nf index 13f7b39e..ab00fbce 100644 --- a/tests/modules/bcftools/consensus/main.nf +++ b/tests/modules/bcftools/consensus/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_CONSENSUS } from '../../../../modules/bcftools/consensus/main.nf' addParams( options: [:] ) +include { BCFTOOLS_CONSENSUS } from '../../../../modules/bcftools/consensus/main.nf' workflow test_bcftools_consensus { input = [ [ id:'test' ], // meta map diff --git 
a/tests/modules/bcftools/consensus/nextflow.config b/tests/modules/bcftools/consensus/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bcftools/consensus/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bcftools/consensus/test.yml b/tests/modules/bcftools/consensus/test.yml index b3760fcd..7fa4ecae 100644 --- a/tests/modules/bcftools/consensus/test.yml +++ b/tests/modules/bcftools/consensus/test.yml @@ -1,5 +1,5 @@ - name: bcftools consensus test_bcftools_consensus - command: nextflow run tests/modules/bcftools/consensus -entry test_bcftools_consensus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/consensus -entry test_bcftools_consensus -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/consensus/nextflow.config tags: - bcftools/consensus - bcftools diff --git a/tests/modules/bcftools/filter/main.nf b/tests/modules/bcftools/filter/main.nf index bd419e3a..85fbf950 100644 --- a/tests/modules/bcftools/filter/main.nf +++ b/tests/modules/bcftools/filter/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 //keep --no-verson argument, otherwise md5 will change on each execution -include { BCFTOOLS_FILTER } from '../../../../modules/bcftools/filter/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_FILTER } from '../../../../modules/bcftools/filter/main.nf' workflow test_bcftools_filter { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/filter/nextflow.config b/tests/modules/bcftools/filter/nextflow.config new file mode 100644 index 00000000..68cac7bb --- /dev/null +++ b/tests/modules/bcftools/filter/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_FILTER { + ext.args = '--no-version' + } + +} diff --git a/tests/modules/bcftools/filter/test.yml b/tests/modules/bcftools/filter/test.yml index 0f8e48eb..da842538 100644 --- a/tests/modules/bcftools/filter/test.yml +++ b/tests/modules/bcftools/filter/test.yml @@ -1,5 +1,5 @@ - name: bcftools filter test_bcftools_filter - command: nextflow run tests/modules/bcftools/filter -entry test_bcftools_filter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/filter -entry test_bcftools_filter -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/filter/nextflow.config tags: - bcftools/filter - bcftools diff --git a/tests/modules/bcftools/index/main.nf b/tests/modules/bcftools/index/main.nf index 73909d66..839cd988 100644 --- a/tests/modules/bcftools/index/main.nf +++ b/tests/modules/bcftools/index/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_CSI } from '../../../../modules/bcftools/index/main.nf' addParams( options: [:] ) -include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_TBI } from '../../../../modules/bcftools/index/main.nf' addParams( options: [args: '-t'] ) +include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_CSI } from '../../../../modules/bcftools/index/main.nf' +include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_TBI } from '../../../../modules/bcftools/index/main.nf' workflow test_bcftools_index_csi { diff --git a/tests/modules/bcftools/index/nextflow.config b/tests/modules/bcftools/index/nextflow.config new file mode 100644 index 00000000..9a060ba2 --- /dev/null +++ 
b/tests/modules/bcftools/index/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_INDEX_TBI { + ext.args = '-t' + } + +} diff --git a/tests/modules/bcftools/index/test.yml b/tests/modules/bcftools/index/test.yml index 36c5f3c0..f1a29437 100644 --- a/tests/modules/bcftools/index/test.yml +++ b/tests/modules/bcftools/index/test.yml @@ -1,5 +1,5 @@ - name: bcftools index - command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_csi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_csi -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/index/nextflow.config tags: - bcftools - bcftools/index @@ -8,7 +8,7 @@ md5sum: 5f930522d2b9dcdba2807b7da4dfa3fd - name: bcftools index tbi - command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_tbi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_tbi -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/index/nextflow.config tags: - bcftools - bcftools/index diff --git a/tests/modules/bcftools/isec/main.nf b/tests/modules/bcftools/isec/main.nf index 1b0c2c07..0b8ffc5c 100644 --- a/tests/modules/bcftools/isec/main.nf +++ b/tests/modules/bcftools/isec/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_ISEC } from '../../../../modules/bcftools/isec/main.nf' addParams( options: ['args': '--nfiles +2 --output-type z --no-version'] ) +include { BCFTOOLS_ISEC } from '../../../../modules/bcftools/isec/main.nf' workflow test_bcftools_isec { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/isec/nextflow.config b/tests/modules/bcftools/isec/nextflow.config new file mode 100644 index 00000000..770e4674 --- /dev/null +++ b/tests/modules/bcftools/isec/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_ISEC { + ext.args = '--nfiles +2 --output-type z --no-version' + } + +} diff --git a/tests/modules/bcftools/isec/test.yml b/tests/modules/bcftools/isec/test.yml index 92186c89..fc887d9d 100644 --- a/tests/modules/bcftools/isec/test.yml +++ b/tests/modules/bcftools/isec/test.yml @@ -1,5 +1,5 @@ - name: bcftools isec test_bcftools_isec - command: nextflow run tests/modules/bcftools/isec -entry test_bcftools_isec -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/isec -entry test_bcftools_isec -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/isec/nextflow.config tags: - bcftools - bcftools/isec diff --git a/tests/modules/bcftools/merge/main.nf b/tests/modules/bcftools/merge/main.nf index a672a9a7..119e237a 100644 --- a/tests/modules/bcftools/merge/main.nf +++ b/tests/modules/bcftools/merge/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 //keep --no-verson argument, otherwise md5 will change on each execution -include { BCFTOOLS_MERGE } from '../../../../modules/bcftools/merge/main.nf' addParams( options: ['args': '--force-samples --no-version'] ) +include { BCFTOOLS_MERGE } from '../../../../modules/bcftools/merge/main.nf' workflow test_bcftools_merge { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/merge/nextflow.config b/tests/modules/bcftools/merge/nextflow.config new file mode 100644 index 00000000..e11e50b6 --- /dev/null +++ 
b/tests/modules/bcftools/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_MERGE { + ext.args = '--force-samples --no-version' + } + +} diff --git a/tests/modules/bcftools/merge/test.yml b/tests/modules/bcftools/merge/test.yml index d3cdd74a..6c9dd556 100644 --- a/tests/modules/bcftools/merge/test.yml +++ b/tests/modules/bcftools/merge/test.yml @@ -1,5 +1,5 @@ - name: bcftools merge test_bcftools_merge - command: nextflow run tests/modules/bcftools/merge -entry test_bcftools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/merge -entry test_bcftools_merge -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/merge/nextflow.config tags: - bcftools/merge - bcftools diff --git a/tests/modules/bcftools/mpileup/main.nf b/tests/modules/bcftools/mpileup/main.nf index 2225c5e0..813ca408 100644 --- a/tests/modules/bcftools/mpileup/main.nf +++ b/tests/modules/bcftools/mpileup/main.nf @@ -2,8 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_MPILEUP } from '../../../../modules/bcftools/mpileup/main.nf' addParams( options: ['args2': '--no-version --ploidy 1 --multiallelic-caller', - 'args3': '--no-version' ] ) +include { BCFTOOLS_MPILEUP } from '../../../../modules/bcftools/mpileup/main.nf' workflow test_bcftools_mpileup { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/mpileup/nextflow.config b/tests/modules/bcftools/mpileup/nextflow.config new file mode 100644 index 00000000..c21fef8d --- /dev/null +++ b/tests/modules/bcftools/mpileup/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_MPILEUP { + ext.args2 = '--no-version --ploidy 1 --multiallelic-caller' + ext.args3 = '--no-version' + } + +} diff --git a/tests/modules/bcftools/mpileup/test.yml b/tests/modules/bcftools/mpileup/test.yml index 71877e29..f081c543 100644 --- a/tests/modules/bcftools/mpileup/test.yml +++ b/tests/modules/bcftools/mpileup/test.yml @@ -1,5 +1,5 @@ - name: bcftools mpileup test_bcftools_mpileup - command: nextflow run tests/modules/bcftools/mpileup -entry test_bcftools_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/mpileup -entry test_bcftools_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/mpileup/nextflow.config tags: - bcftools/mpileup - bcftools diff --git a/tests/modules/bcftools/norm/main.nf b/tests/modules/bcftools/norm/main.nf index 046c0b3c..ac056bea 100644 --- a/tests/modules/bcftools/norm/main.nf +++ b/tests/modules/bcftools/norm/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_NORM } from '../../../../modules/bcftools/norm/main.nf' addParams( options: ['args': '-m -any --no-version'] ) +include { BCFTOOLS_NORM } from '../../../../modules/bcftools/norm/main.nf' workflow test_bcftools_norm { diff --git a/tests/modules/bcftools/norm/nextflow.config b/tests/modules/bcftools/norm/nextflow.config new file mode 100644 index 00000000..e4d27a73 --- /dev/null +++ b/tests/modules/bcftools/norm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_NORM { + ext.args = '-m -any --no-version' + } + +} diff --git a/tests/modules/bcftools/norm/test.yml b/tests/modules/bcftools/norm/test.yml index 40d0cc7e..bb4f9aca 
100644 --- a/tests/modules/bcftools/norm/test.yml +++ b/tests/modules/bcftools/norm/test.yml @@ -1,5 +1,5 @@ - name: bcftools norm - command: nextflow run ./tests/modules/bcftools/norm -entry test_bcftools_norm -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/norm -entry test_bcftools_norm -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/norm/nextflow.config tags: - bcftools - bcftools/norm diff --git a/tests/modules/bcftools/query/main.nf b/tests/modules/bcftools/query/main.nf index a16ceddf..733cae17 100644 --- a/tests/modules/bcftools/query/main.nf +++ b/tests/modules/bcftools/query/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_QUERY } from '../../../../modules/bcftools/query/main.nf' addParams( options: ['args': "-f '%CHROM %POS %REF %ALT[%SAMPLE=%GT]'" ] ) +include { BCFTOOLS_QUERY } from '../../../../modules/bcftools/query/main.nf' workflow test_bcftools_query { diff --git a/tests/modules/bcftools/query/nextflow.config b/tests/modules/bcftools/query/nextflow.config new file mode 100644 index 00000000..e4105006 --- /dev/null +++ b/tests/modules/bcftools/query/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_QUERY { + ext.args = "-f '%CHROM %POS %REF %ALT[%SAMPLE=%GT]'" + } + +} diff --git a/tests/modules/bcftools/query/test.yml b/tests/modules/bcftools/query/test.yml index fbfda92b..aaa9af7b 100644 --- a/tests/modules/bcftools/query/test.yml +++ b/tests/modules/bcftools/query/test.yml @@ -1,5 +1,5 @@ - name: bcftools query - command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/query/nextflow.config tags: - bcftools - bcftools/query @@ -8,7 +8,7 @@ md5sum: c32a6d28f185822d8fe1eeb7e42ec155 - name: bcftools query with optional files - command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query_with_optional_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query_with_optional_files -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/query/nextflow.config tags: - bcftools - bcftools/query diff --git a/tests/modules/bcftools/reheader/main.nf b/tests/modules/bcftools/reheader/main.nf index 40863331..d1dcd8b8 100644 --- a/tests/modules/bcftools/reheader/main.nf +++ b/tests/modules/bcftools/reheader/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_REHEADER } from '../../../../modules/bcftools/reheader/main.nf' addParams( options: [suffix: '.updated'] ) +include { BCFTOOLS_REHEADER } from '../../../../modules/bcftools/reheader/main.nf' workflow test_bcftools_reheader_update_sequences { diff --git a/tests/modules/bcftools/reheader/nextflow.config b/tests/modules/bcftools/reheader/nextflow.config new file mode 100644 index 00000000..a377b26d --- /dev/null +++ b/tests/modules/bcftools/reheader/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_REHEADER { + ext.suffix = '.updated' + } + +} diff --git a/tests/modules/bcftools/reheader/test.yml b/tests/modules/bcftools/reheader/test.yml index 78337206..1ce0b104 100644 --- a/tests/modules/bcftools/reheader/test.yml +++ 
b/tests/modules/bcftools/reheader/test.yml @@ -1,5 +1,5 @@ - name: bcftools reheader test_bcftools_reheader_update_sequences - command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_update_sequences -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_update_sequences -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools @@ -8,7 +8,7 @@ md5sum: 9e29f28038bfce77ee00022627209ed6 - name: bcftools reheader test_bcftools_reheader_new_header - command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools @@ -17,7 +17,7 @@ md5sum: f7f536d889bbf5be40243252c394ee1f - name: bcftools reheader test_bcftools_reheader_new_header_update_sequences - command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header_update_sequences -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header_update_sequences -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools diff --git a/tests/modules/bcftools/stats/main.nf b/tests/modules/bcftools/stats/main.nf index 4039c080..808a3330 100644 --- a/tests/modules/bcftools/stats/main.nf +++ b/tests/modules/bcftools/stats/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_STATS } from '../../../../modules/bcftools/stats/main.nf' addParams( options: [:] ) +include { BCFTOOLS_STATS } from '../../../../modules/bcftools/stats/main.nf' workflow test_bcftools_stats { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/stats/nextflow.config b/tests/modules/bcftools/stats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bcftools/stats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bcftools/stats/test.yml b/tests/modules/bcftools/stats/test.yml index cd25fe66..d3587f95 100644 --- a/tests/modules/bcftools/stats/test.yml +++ b/tests/modules/bcftools/stats/test.yml @@ -1,5 +1,5 @@ - name: bcftools stats test_bcftools_stats - command: nextflow run tests/modules/bcftools/stats -entry test_bcftools_stats -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/stats -entry test_bcftools_stats -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/stats/nextflow.config tags: - bcftools - bcftools/stats diff --git a/tests/modules/bcftools/view/main.nf b/tests/modules/bcftools/view/main.nf index a8ac3b31..f45d0284 100644 --- a/tests/modules/bcftools/view/main.nf +++ b/tests/modules/bcftools/view/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_VIEW } from '../../../../modules/bcftools/view/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_VIEW } from '../../../../modules/bcftools/view/main.nf' workflow test_bcftools_view { diff --git a/tests/modules/bcftools/view/nextflow.config b/tests/modules/bcftools/view/nextflow.config new file mode 100644 index 00000000..e1723b89 --- /dev/null +++ 
b/tests/modules/bcftools/view/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_VIEW { + ext.args = '--no-version' + } + +} diff --git a/tests/modules/bcftools/view/test.yml b/tests/modules/bcftools/view/test.yml index 179e9a1c..fa926dd6 100644 --- a/tests/modules/bcftools/view/test.yml +++ b/tests/modules/bcftools/view/test.yml @@ -1,5 +1,5 @@ - name: bcftools view - command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/view/nextflow.config tags: - bcftools - bcftools/view @@ -8,7 +8,7 @@ md5sum: fc178eb342a91dc0d1d568601ad8f8e2 - name: bcftools view with optional files - command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view_with_optional_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view_with_optional_files -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/view/nextflow.config tags: - bcftools - bcftools/view diff --git a/tests/modules/bedtools/bamtobed/main.nf b/tests/modules/bedtools/bamtobed/main.nf index 41cf460a..e7635a3d 100644 --- a/tests/modules/bedtools/bamtobed/main.nf +++ b/tests/modules/bedtools/bamtobed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_BAMTOBED } from '../../../../modules/bedtools/bamtobed/main.nf' addParams( options: [:] ) +include { BEDTOOLS_BAMTOBED } from '../../../../modules/bedtools/bamtobed/main.nf' workflow test_bedtools_bamtobed { input = [ [ id:'test'], //meta map diff --git a/tests/modules/bedtools/bamtobed/nextflow.config b/tests/modules/bedtools/bamtobed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/bamtobed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/bamtobed/test.yml b/tests/modules/bedtools/bamtobed/test.yml index 106d125d..b038467c 100644 --- a/tests/modules/bedtools/bamtobed/test.yml +++ b/tests/modules/bedtools/bamtobed/test.yml @@ -1,5 +1,5 @@ - name: bedtools bamtobed - command: nextflow run ./tests/modules/bedtools/bamtobed -entry test_bedtools_bamtobed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/bamtobed -entry test_bedtools_bamtobed -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/bamtobed/nextflow.config tags: - bedtools - bedtools/bamtobed diff --git a/tests/modules/bedtools/complement/main.nf b/tests/modules/bedtools/complement/main.nf index 6456fe60..a1cca033 100644 --- a/tests/modules/bedtools/complement/main.nf +++ b/tests/modules/bedtools/complement/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_COMPLEMENT } from '../../../../modules/bedtools/complement/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_COMPLEMENT } from '../../../../modules/bedtools/complement/main.nf' workflow test_bedtools_complement { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/complement/nextflow.config b/tests/modules/bedtools/complement/nextflow.config new file mode 100644 index 00000000..561fdead --- /dev/null +++ b/tests/modules/bedtools/complement/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_COMPLEMENT { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/complement/test.yml b/tests/modules/bedtools/complement/test.yml index 2ebc6419..9dbeb36f 100644 --- a/tests/modules/bedtools/complement/test.yml +++ b/tests/modules/bedtools/complement/test.yml @@ -1,5 +1,5 @@ - name: bedtools complement - command: nextflow run ./tests/modules/bedtools/complement -entry test_bedtools_complement -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/complement -entry test_bedtools_complement -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/complement/nextflow.config tags: - bedtools - bedtools/complement diff --git a/tests/modules/bedtools/genomecov/main.nf b/tests/modules/bedtools/genomecov/main.nf index 445ed078..b507a2cd 100644 --- a/tests/modules/bedtools/genomecov/main.nf +++ b/tests/modules/bedtools/genomecov/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_GENOMECOV } from '../../../../modules/bedtools/genomecov/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_GENOMECOV } from '../../../../modules/bedtools/genomecov/main.nf' workflow test_bedtools_genomecov_noscale { input = [ diff --git a/tests/modules/bedtools/genomecov/nextflow.config b/tests/modules/bedtools/genomecov/nextflow.config new file mode 100644 index 00000000..bc0e4aaf --- /dev/null +++ b/tests/modules/bedtools/genomecov/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_GENOMECOV { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/genomecov/test.yml b/tests/modules/bedtools/genomecov/test.yml index 477e6555..8f63bde9 100644 --- a/tests/modules/bedtools/genomecov/test.yml +++ b/tests/modules/bedtools/genomecov/test.yml @@ -1,5 +1,5 @@ - name: bedtools genomecov test_bedtools_genomecov_noscale - command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_noscale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_noscale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -8,7 +8,7 @@ md5sum: 66083198daca6c001d328ba9616e9b53 - name: bedtools genomecov test_bedtools_genomecov_nonbam_noscale - command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_noscale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_noscale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -17,7 +17,7 @@ md5sum: f47b58840087426e5b643d8dfd155c1f - name: bedtools genomecov test_bedtools_genomecov_scale - command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_scale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_scale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -26,7 +26,7 @@ md5sum: 01291b6e1beab72e046653e709eb0e10 - name: bedtools genomecov test_bedtools_genomecov_nonbam_scale - command: nextflow run tests/modules/bedtools/genomecov -entry 
test_bedtools_genomecov_nonbam_scale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_scale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov diff --git a/tests/modules/bedtools/getfasta/main.nf b/tests/modules/bedtools/getfasta/main.nf index 194597ae..425c49d5 100644 --- a/tests/modules/bedtools/getfasta/main.nf +++ b/tests/modules/bedtools/getfasta/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_GETFASTA } from '../../../../modules/bedtools/getfasta/main.nf' addParams( options: [:] ) +include { BEDTOOLS_GETFASTA } from '../../../../modules/bedtools/getfasta/main.nf' workflow test_bedtools_getfasta { bed = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) diff --git a/tests/modules/bedtools/getfasta/nextflow.config b/tests/modules/bedtools/getfasta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/getfasta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/getfasta/test.yml b/tests/modules/bedtools/getfasta/test.yml index adf10da5..a455f861 100644 --- a/tests/modules/bedtools/getfasta/test.yml +++ b/tests/modules/bedtools/getfasta/test.yml @@ -1,5 +1,5 @@ - name: bedtools getfasta - command: nextflow run ./tests/modules/bedtools/getfasta -entry test_bedtools_getfasta -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/getfasta -entry test_bedtools_getfasta -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/getfasta/nextflow.config tags: - bedtools - bedtools/getfasta diff --git a/tests/modules/bedtools/intersect/main.nf b/tests/modules/bedtools/intersect/main.nf index 73a9b30c..c17d03e6 100644 --- a/tests/modules/bedtools/intersect/main.nf +++ b/tests/modules/bedtools/intersect/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_INTERSECT } from '../../../../modules/bedtools/intersect/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_INTERSECT } from '../../../../modules/bedtools/intersect/main.nf' workflow test_bedtools_intersect { input = [ diff --git a/tests/modules/bedtools/intersect/nextflow.config b/tests/modules/bedtools/intersect/nextflow.config new file mode 100644 index 00000000..c7d0c826 --- /dev/null +++ b/tests/modules/bedtools/intersect/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_INTERSECT { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/intersect/test.yml b/tests/modules/bedtools/intersect/test.yml index c8c3ad6e..86fe70cd 100644 --- a/tests/modules/bedtools/intersect/test.yml +++ b/tests/modules/bedtools/intersect/test.yml @@ -1,5 +1,5 @@ - name: bedtools intersect test_bedtools_intersect - command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/intersect/nextflow.config tags: - bedtools - bedtools/intersect @@ -8,7 +8,7 @@ md5sum: afcbf01c2f2013aad71dbe8e34f2c15c - name: bedtools intersect test_bedtools_intersect_bam - command: nextflow run 
tests/modules/bedtools/intersect -entry test_bedtools_intersect_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect_bam -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/intersect/nextflow.config tags: - bedtools - bedtools/intersect diff --git a/tests/modules/bedtools/makewindows/main.nf b/tests/modules/bedtools/makewindows/main.nf index 23c40a75..ce37de72 100644 --- a/tests/modules/bedtools/makewindows/main.nf +++ b/tests/modules/bedtools/makewindows/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -test_options = ['args': '-w 50 '] -include { BEDTOOLS_MAKEWINDOWS } from '../../../../modules/bedtools/makewindows/main.nf' addParams( options: test_options ) +include { BEDTOOLS_MAKEWINDOWS } from '../../../../modules/bedtools/makewindows/main.nf' workflow test_bedtools_makewindows { - - input = [ [ id:'test'], - file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)] + + input = [ + [ id:'test'], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] BEDTOOLS_MAKEWINDOWS ( input, true ) } diff --git a/tests/modules/bedtools/makewindows/nextflow.config b/tests/modules/bedtools/makewindows/nextflow.config new file mode 100644 index 00000000..e8b8c3ea --- /dev/null +++ b/tests/modules/bedtools/makewindows/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_MAKEWINDOWS { + ext.args = '-w 50 ' + } + +} diff --git a/tests/modules/bedtools/makewindows/test.yml b/tests/modules/bedtools/makewindows/test.yml index c39d1c08..8accaa36 100644 --- a/tests/modules/bedtools/makewindows/test.yml +++ b/tests/modules/bedtools/makewindows/test.yml @@ -1,5 +1,5 @@ - name: bedtools makewindows test_bedtools_makewindows - command: nextflow run tests/modules/bedtools/makewindows -entry test_bedtools_makewindows -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/makewindows -entry test_bedtools_makewindows -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/makewindows/nextflow.config tags: - bedtools/makewindows - bedtools diff --git a/tests/modules/bedtools/maskfasta/main.nf b/tests/modules/bedtools/maskfasta/main.nf index 8c30fbdc..0da02ad3 100644 --- a/tests/modules/bedtools/maskfasta/main.nf +++ b/tests/modules/bedtools/maskfasta/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_MASKFASTA } from '../../../../modules/bedtools/maskfasta/main.nf' addParams( options: [:] ) +include { BEDTOOLS_MASKFASTA } from '../../../../modules/bedtools/maskfasta/main.nf' workflow test_bedtools_maskfasta { bed = [ [ id:'test'], diff --git a/tests/modules/bedtools/maskfasta/nextflow.config b/tests/modules/bedtools/maskfasta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/maskfasta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/maskfasta/test.yml b/tests/modules/bedtools/maskfasta/test.yml index f536a6eb..f1e8f35a 100644 --- a/tests/modules/bedtools/maskfasta/test.yml +++ b/tests/modules/bedtools/maskfasta/test.yml @@ -1,5 +1,5 @@ - name: bedtools maskfasta - command: nextflow run ./tests/modules/bedtools/maskfasta -entry test_bedtools_maskfasta -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/bedtools/maskfasta -entry test_bedtools_maskfasta -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/maskfasta/nextflow.config tags: - bedtools - bedtools/maskfasta diff --git a/tests/modules/bedtools/merge/main.nf b/tests/modules/bedtools/merge/main.nf index f11b804a..5fca0526 100644 --- a/tests/modules/bedtools/merge/main.nf +++ b/tests/modules/bedtools/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_MERGE } from '../../../../modules/bedtools/merge/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_MERGE } from '../../../../modules/bedtools/merge/main.nf' workflow test_bedtools_merge { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/merge/nextflow.config b/tests/modules/bedtools/merge/nextflow.config new file mode 100644 index 00000000..e7d635dd --- /dev/null +++ b/tests/modules/bedtools/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_MERGE { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/merge/test.yml b/tests/modules/bedtools/merge/test.yml index 62bc6860..5fc8b034 100644 --- a/tests/modules/bedtools/merge/test.yml +++ b/tests/modules/bedtools/merge/test.yml @@ -1,5 +1,5 @@ - name: bedtools merge - command: nextflow run ./tests/modules/bedtools/merge -entry test_bedtools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/merge -entry test_bedtools_merge -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/merge/nextflow.config tags: - bedtools - bedtools/merge diff --git a/tests/modules/bedtools/slop/main.nf b/tests/modules/bedtools/slop/main.nf index 47c19781..e7136fdc 100644 --- a/tests/modules/bedtools/slop/main.nf +++ b/tests/modules/bedtools/slop/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SLOP } from '../../../../modules/bedtools/slop/main.nf' addParams( options: [args: '-l 15 -r 30', suffix: '_out'] ) +include { BEDTOOLS_SLOP } from '../../../../modules/bedtools/slop/main.nf' workflow test_bedtools_slop { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/slop/nextflow.config b/tests/modules/bedtools/slop/nextflow.config new file mode 100644 index 00000000..5dc03727 --- /dev/null +++ b/tests/modules/bedtools/slop/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_SLOP { + ext.args = '-l 15 -r 30' + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/slop/test.yml b/tests/modules/bedtools/slop/test.yml index 859b569e..0d49e66b 100644 --- a/tests/modules/bedtools/slop/test.yml +++ b/tests/modules/bedtools/slop/test.yml @@ -1,5 +1,5 @@ - name: bedtools slop - command: nextflow run ./tests/modules/bedtools/slop -entry test_bedtools_slop -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/slop -entry test_bedtools_slop -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/slop/nextflow.config tags: - bedtools - bedtools/slop diff --git a/tests/modules/bedtools/sort/main.nf b/tests/modules/bedtools/sort/main.nf index b5d34e2f..342b4116 100644 --- a/tests/modules/bedtools/sort/main.nf +++ b/tests/modules/bedtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SORT } from '../../../../modules/bedtools/sort/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_SORT } from 
'../../../../modules/bedtools/sort/main.nf' workflow test_bedtools_sort { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/sort/nextflow.config b/tests/modules/bedtools/sort/nextflow.config new file mode 100644 index 00000000..6bb73232 --- /dev/null +++ b/tests/modules/bedtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_SORT { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/sort/test.yml b/tests/modules/bedtools/sort/test.yml index 1dd04507..173f0587 100644 --- a/tests/modules/bedtools/sort/test.yml +++ b/tests/modules/bedtools/sort/test.yml @@ -1,5 +1,5 @@ - name: bedtools sort - command: nextflow run ./tests/modules/bedtools/sort -entry test_bedtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/sort -entry test_bedtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/sort/nextflow.config tags: - bedtools - bedtools/sort diff --git a/tests/modules/bedtools/subtract/main.nf b/tests/modules/bedtools/subtract/main.nf index 9997f08c..2a0e6eab 100644 --- a/tests/modules/bedtools/subtract/main.nf +++ b/tests/modules/bedtools/subtract/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SUBTRACT } from '../../../../modules/bedtools/subtract/main.nf' addParams( options: [:] ) +include { BEDTOOLS_SUBTRACT } from '../../../../modules/bedtools/subtract/main.nf' workflow test_bedtools_subtract { input = [ diff --git a/tests/modules/bedtools/subtract/nextflow.config b/tests/modules/bedtools/subtract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/subtract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/subtract/test.yml b/tests/modules/bedtools/subtract/test.yml index fd8660fc..52b57436 100644 --- a/tests/modules/bedtools/subtract/test.yml +++ b/tests/modules/bedtools/subtract/test.yml @@ -1,5 +1,5 @@ - name: bedtools subtract - command: nextflow run ./tests/modules/bedtools/subtract -entry test_bedtools_subtract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/subtract -entry test_bedtools_subtract -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/subtract/nextflow.config tags: - bedtools - bedtools/subtract diff --git a/tests/modules/bismark/align/main.nf b/tests/modules/bismark/align/main.nf index 1f1fcdce..fe6d616a 100644 --- a/tests/modules/bismark/align/main.nf +++ b/tests/modules/bismark/align/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN as BISMARK_ALIGN_SE } from '../../../../modules/bismark/align/main.nf' addParams( options: [ publish_dir:'test_single_end' ] ) -include { BISMARK_ALIGN as BISMARK_ALIGN_PE } from '../../../../modules/bismark/align/main.nf' addParams( options: [ publish_dir:'test_paired_end' ] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN as BISMARK_ALIGN_SE } from '../../../../modules/bismark/align/main.nf' +include { BISMARK_ALIGN as BISMARK_ALIGN_PE } from '../../../../modules/bismark/align/main.nf' // // Test with single-end data diff --git 
a/tests/modules/bismark/align/nextflow.config b/tests/modules/bismark/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/align/test.yml b/tests/modules/bismark/align/test.yml index 42dc44b3..ffae05af 100644 --- a/tests/modules/bismark/align/test.yml +++ b/tests/modules/bismark/align/test.yml @@ -1,19 +1,19 @@ - name: bismark align single-end test workflow - command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bismark/align/nextflow.config tags: - bismark - bismark/align files: - - path: output/test_single_end/test.methylated_1_bismark_bt2.bam + - path: output/bismark/test.methylated_1_bismark_bt2.bam md5sum: dca4ba9ff705b70446f812e59bdb1a32 - - path: output/test_single_end/test.methylated_1_bismark_bt2_SE_report.txt + - path: output/bismark/test.methylated_1_bismark_bt2_SE_report.txt - name: bismark align paired-end test workflow - command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bismark/align/nextflow.config tags: - bismark - bismark/align files: - - path: output/test_paired_end/test.methylated_1_bismark_bt2_pe.bam + - path: output/bismark/test.methylated_1_bismark_bt2_pe.bam md5sum: 43943b1f30d056fcbd9ed26061ea0583 - - path: output/test_paired_end/test.methylated_1_bismark_bt2_PE_report.txt + - path: output/bismark/test.methylated_1_bismark_bt2_PE_report.txt diff --git a/tests/modules/bismark/deduplicate/main.nf b/tests/modules/bismark/deduplicate/main.nf index fc44745c..ad97d66a 100644 --- a/tests/modules/bismark/deduplicate/main.nf +++ b/tests/modules/bismark/deduplicate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) +include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' workflow test_bismark_deduplicate { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/deduplicate/nextflow.config b/tests/modules/bismark/deduplicate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/deduplicate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/deduplicate/test.yml b/tests/modules/bismark/deduplicate/test.yml index 604c1023..12099750 100644 --- a/tests/modules/bismark/deduplicate/test.yml +++ b/tests/modules/bismark/deduplicate/test.yml @@ -1,5 +1,5 @@ - name: bismark deduplicate test workflow - command: nextflow run ./tests/modules/bismark/deduplicate -entry test_bismark_deduplicate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/deduplicate -entry test_bismark_deduplicate -c ./tests/config/nextflow.config -c ./tests/modules/bismark/deduplicate/nextflow.config tags: - bismark - bismark/deduplicate diff --git 
a/tests/modules/bismark/genomepreparation/main.nf b/tests/modules/bismark/genomepreparation/main.nf index ab847171..a9111af3 100644 --- a/tests/modules/bismark/genomepreparation/main.nf +++ b/tests/modules/bismark/genomepreparation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' workflow test_bismark_genomepreparation { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bismark/genomepreparation/nextflow.config b/tests/modules/bismark/genomepreparation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/genomepreparation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/genomepreparation/test.yml b/tests/modules/bismark/genomepreparation/test.yml index 15a7e7d6..a0d3c072 100644 --- a/tests/modules/bismark/genomepreparation/test.yml +++ b/tests/modules/bismark/genomepreparation/test.yml @@ -1,5 +1,5 @@ - name: bismark genomepreparation test workflow - command: nextflow run ./tests/modules/bismark/genomepreparation -entry test_bismark_genomepreparation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/genomepreparation -entry test_bismark_genomepreparation -c ./tests/config/nextflow.config -c ./tests/modules/bismark/genomepreparation/nextflow.config tags: - bismark - bismark/genomepreparation diff --git a/tests/modules/bismark/methylationextractor/main.nf b/tests/modules/bismark/methylationextractor/main.nf index 0b3f77a1..ed857fe8 100644 --- a/tests/modules/bismark/methylationextractor/main.nf +++ b/tests/modules/bismark/methylationextractor/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' workflow test_bismark_methylationextractor { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/methylationextractor/nextflow.config b/tests/modules/bismark/methylationextractor/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/methylationextractor/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/methylationextractor/test.yml b/tests/modules/bismark/methylationextractor/test.yml index 4505c428..f25b7646 100644 --- a/tests/modules/bismark/methylationextractor/test.yml +++ b/tests/modules/bismark/methylationextractor/test.yml @@ -1,5 +1,5 @@ - name: bismark methylation extractor test workflow - command: nextflow run ./tests/modules/bismark/methylationextractor -entry test_bismark_methylationextractor -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/methylationextractor -entry 
test_bismark_methylationextractor -c ./tests/config/nextflow.config -c ./tests/modules/bismark/methylationextractor/nextflow.config tags: - bismark - bismark/methylationextractor diff --git a/tests/modules/bismark/report/main.nf b/tests/modules/bismark/report/main.nf index 945d24ed..f80fb2bc 100644 --- a/tests/modules/bismark/report/main.nf +++ b/tests/modules/bismark/report/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' addParams( options: [:] ) -include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' addParams( options: [:] ) -include { BISMARK_REPORT } from '../../../../modules/bismark/report/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' +include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' +include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' +include { BISMARK_REPORT } from '../../../../modules/bismark/report/main.nf' workflow test_bismark_report { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/report/nextflow.config b/tests/modules/bismark/report/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/report/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/report/test.yml b/tests/modules/bismark/report/test.yml index 7e85e4dd..9195994c 100644 --- a/tests/modules/bismark/report/test.yml +++ b/tests/modules/bismark/report/test.yml @@ -1,5 +1,5 @@ - name: bismark report test workflow - command: nextflow run ./tests/modules/bismark/report -entry test_bismark_report -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/report -entry test_bismark_report -c ./tests/config/nextflow.config -c ./tests/modules/bismark/report/nextflow.config tags: - bismark - bismark/report diff --git a/tests/modules/bismark/summary/main.nf b/tests/modules/bismark/summary/main.nf index 4170d19a..8eabe51f 100644 --- a/tests/modules/bismark/summary/main.nf +++ b/tests/modules/bismark/summary/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' addParams( options: [:] ) -include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' addParams( options: [:] ) -include { BISMARK_SUMMARY } from '../../../../modules/bismark/summary/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' +include { BISMARK_DEDUPLICATE } from 
'../../../../modules/bismark/deduplicate/main.nf' +include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' +include { BISMARK_SUMMARY } from '../../../../modules/bismark/summary/main.nf' workflow test_bismark_summary { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/summary/nextflow.config b/tests/modules/bismark/summary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/summary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/summary/test.yml b/tests/modules/bismark/summary/test.yml index 06478873..3b5196b9 100644 --- a/tests/modules/bismark/summary/test.yml +++ b/tests/modules/bismark/summary/test.yml @@ -1,5 +1,5 @@ - name: bismark summary test workflow - command: nextflow run ./tests/modules/bismark/summary -entry test_bismark_summary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/summary -entry test_bismark_summary -c ./tests/config/nextflow.config -c ./tests/modules/bismark/summary/nextflow.config tags: - bismark - bismark/summary diff --git a/tests/modules/blast/blastn/main.nf b/tests/modules/blast/blastn/main.nf index fd690dcc..3c8496dc 100644 --- a/tests/modules/blast/blastn/main.nf +++ b/tests/modules/blast/blastn/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' addParams( options: ['args': '-dbtype nucl'] ) -include { BLAST_BLASTN } from '../../../../modules/blast/blastn/main.nf' addParams( options: [:] ) +include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' +include { BLAST_BLASTN } from '../../../../modules/blast/blastn/main.nf' workflow test_blast_blastn { input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] diff --git a/tests/modules/blast/blastn/nextflow.config b/tests/modules/blast/blastn/nextflow.config new file mode 100644 index 00000000..1d5a2c01 --- /dev/null +++ b/tests/modules/blast/blastn/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BLAST_MAKEBLASTDB { + ext.args = '-dbtype nucl' + } + +} diff --git a/tests/modules/blast/blastn/test.yml b/tests/modules/blast/blastn/test.yml index 98f76921..17522c9b 100644 --- a/tests/modules/blast/blastn/test.yml +++ b/tests/modules/blast/blastn/test.yml @@ -1,5 +1,5 @@ - name: blast_blastn - command: nextflow run ./tests/modules/blast/blastn -entry test_blast_blastn -c tests/config/nextflow.config + command: nextflow run ./tests/modules/blast/blastn -entry test_blast_blastn -c ./tests/config/nextflow.config -c ./tests/modules/blast/blastn/nextflow.config tags: - blast - blast/blastn diff --git a/tests/modules/blast/makeblastdb/main.nf b/tests/modules/blast/makeblastdb/main.nf index 48b39f22..9d778457 100644 --- a/tests/modules/blast/makeblastdb/main.nf +++ b/tests/modules/blast/makeblastdb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' addParams( options: ['args': '-dbtype nucl'] ) +include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' workflow test_blast_makeblastdb { input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], 
checkIfExists: true) ] diff --git a/tests/modules/blast/makeblastdb/nextflow.config b/tests/modules/blast/makeblastdb/nextflow.config new file mode 100644 index 00000000..1d5a2c01 --- /dev/null +++ b/tests/modules/blast/makeblastdb/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BLAST_MAKEBLASTDB { + ext.args = '-dbtype nucl' + } + +} diff --git a/tests/modules/blast/makeblastdb/test.yml b/tests/modules/blast/makeblastdb/test.yml index 7df17968..3b59f3f6 100644 --- a/tests/modules/blast/makeblastdb/test.yml +++ b/tests/modules/blast/makeblastdb/test.yml @@ -1,5 +1,5 @@ - name: blast_makeblastdb - command: nextflow run ./tests/modules/blast/makeblastdb -entry test_blast_makeblastdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/blast/makeblastdb -entry test_blast_makeblastdb -c ./tests/config/nextflow.config -c ./tests/modules/blast/makeblastdb/nextflow.config tags: - blast - blast/makeblastdb diff --git a/tests/modules/bowtie/align/main.nf b/tests/modules/bowtie/align/main.nf index b2c8059a..e773cd38 100644 --- a/tests/modules/bowtie/align/main.nf +++ b/tests/modules/bowtie/align/main.nf @@ -2,13 +2,16 @@ nextflow.enable.dsl = 2 -include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' addParams( options: [:] ) -include { BOWTIE_ALIGN } from '../../../../modules/bowtie/align/main.nf' addParams( options: [:] ) +include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' +include { BOWTIE_ALIGN } from '../../../../modules/bowtie/align/main.nf' workflow test_bowtie_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE_BUILD ( fasta ) @@ -16,10 +19,13 @@ workflow test_bowtie_align_single_end { } workflow test_bowtie_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE_BUILD ( fasta ) diff --git a/tests/modules/bowtie/align/nextflow.config b/tests/modules/bowtie/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie/align/test.yml b/tests/modules/bowtie/align/test.yml index 76d63d68..1f8d1294 100644 --- a/tests/modules/bowtie/align/test.yml +++ b/tests/modules/bowtie/align/test.yml @@ -1,5 +1,5 @@ - name: bowtie align single-end - command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie/align/nextflow.config tags: - bowtie - bowtie/align @@ -7,36 +7,36 @@ - path: ./output/bowtie/test.bam - path: ./output/bowtie/test.out md5sum: 4b9140ceadb8a18ae9330885370f8a0b - - path: ./output/index/bowtie/genome.3.ebwt + - path: ./output/bowtie/bowtie/genome.3.ebwt md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie/genome.2.ebwt + - path: ./output/bowtie/bowtie/genome.2.ebwt md5sum: 02b44af9f94c62ecd3c583048e25d4cf - - path: ./output/index/bowtie/genome.rev.2.ebwt + - path: ./output/bowtie/bowtie/genome.rev.2.ebwt md5sum: 9e6b0c4c1ddb99ae71ff8a4fe5ec6459 - - path: ./output/index/bowtie/genome.4.ebwt + - path: ./output/bowtie/bowtie/genome.4.ebwt md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie/genome.rev.1.ebwt + - path: ./output/bowtie/bowtie/genome.rev.1.ebwt md5sum: b37aaf11853e65a3b13561f27a912b06 - - path: ./output/index/bowtie/genome.1.ebwt + - path: ./output/bowtie/bowtie/genome.1.ebwt md5sum: d9b76ecf9fd0413240173273b38d8199 - name: bowtie align paired-end - command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie/align/nextflow.config tags: - bowtie - bowtie/align files: - path: ./output/bowtie/test.bam - path: ./output/bowtie/test.out - - path: ./output/index/bowtie/genome.3.ebwt + - path: ./output/bowtie/bowtie/genome.3.ebwt md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie/genome.2.ebwt + - path: ./output/bowtie/bowtie/genome.2.ebwt md5sum: 02b44af9f94c62ecd3c583048e25d4cf - - path: ./output/index/bowtie/genome.rev.2.ebwt + - path: ./output/bowtie/bowtie/genome.rev.2.ebwt md5sum: 9e6b0c4c1ddb99ae71ff8a4fe5ec6459 - - path: ./output/index/bowtie/genome.4.ebwt + - path: ./output/bowtie/bowtie/genome.4.ebwt md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie/genome.rev.1.ebwt + - path: ./output/bowtie/bowtie/genome.rev.1.ebwt md5sum: b37aaf11853e65a3b13561f27a912b06 - - path: ./output/index/bowtie/genome.1.ebwt + - path: ./output/bowtie/bowtie/genome.1.ebwt md5sum: d9b76ecf9fd0413240173273b38d8199 diff --git a/tests/modules/bowtie/build_test/main.nf b/tests/modules/bowtie/build_test/main.nf index a89091a8..7a36fb55 100644 --- a/tests/modules/bowtie/build_test/main.nf +++ b/tests/modules/bowtie/build_test/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' addParams( options: [publish_dir:'bowtie'] ) +include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' workflow test_bowtie_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bowtie/build_test/nextflow.config b/tests/modules/bowtie/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie/build_test/test.yml b/tests/modules/bowtie/build_test/test.yml index c6b765c9..c51d1e8a 100644 --- a/tests/modules/bowtie/build_test/test.yml +++ b/tests/modules/bowtie/build_test/test.yml @@ -1,5 +1,5 @@ - name: 
bowtie build - command: nextflow run ./tests/modules/bowtie/build_test -entry test_bowtie_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie/build_test -entry test_bowtie_build -c ./tests/config/nextflow.config -c ./tests/modules/bowtie/build/nextflow.config tags: - bowtie - bowtie/build diff --git a/tests/modules/bowtie2/align/main.nf b/tests/modules/bowtie2/align/main.nf index 20602f30..8c8e3ab8 100644 --- a/tests/modules/bowtie2/align/main.nf +++ b/tests/modules/bowtie2/align/main.nf @@ -2,13 +2,16 @@ nextflow.enable.dsl = 2 -include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [:] ) -include { BOWTIE2_ALIGN } from '../../../../modules/bowtie2/align/main.nf' addParams( options: [:] ) +include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' +include { BOWTIE2_ALIGN } from '../../../../modules/bowtie2/align/main.nf' workflow test_bowtie2_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE2_BUILD ( fasta ) @@ -16,11 +19,15 @@ workflow test_bowtie2_align_single_end { } workflow test_bowtie2_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + BOWTIE2_BUILD ( fasta ) BOWTIE2_ALIGN ( input, BOWTIE2_BUILD.out.index ) } diff --git a/tests/modules/bowtie2/align/nextflow.config b/tests/modules/bowtie2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie2/align/test.yml b/tests/modules/bowtie2/align/test.yml index 05952b76..95d48b88 100644 --- a/tests/modules/bowtie2/align/test.yml +++ b/tests/modules/bowtie2/align/test.yml @@ -1,41 +1,41 @@ - name: bowtie2 align single-end - command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/align/nextflow.config tags: - bowtie2 - bowtie2/align files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log - - path: ./output/index/bowtie2/genome.3.bt2 + - path: ./output/bowtie2/bowtie2/genome.3.bt2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie2/genome.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.2.bt2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: ./output/index/bowtie2/genome.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.1.bt2 md5sum: 
cbe3d0bbea55bc57c99b4bfa25b5fbdf - - path: ./output/index/bowtie2/genome.4.bt2 + - path: ./output/bowtie2/bowtie2/genome.4.bt2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie2/genome.rev.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.1.bt2 md5sum: 52be6950579598a990570fbcf5372184 - - path: ./output/index/bowtie2/genome.rev.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.2.bt2 md5sum: e3b4ef343dea4dd571642010a7d09597 - name: bowtie2 align paired-end - command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/align/nextflow.config tags: - bowtie2 - bowtie2/align files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log - - path: ./output/index/bowtie2/genome.3.bt2 + - path: ./output/bowtie2/bowtie2/genome.3.bt2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie2/genome.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.2.bt2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: ./output/index/bowtie2/genome.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.1.bt2 md5sum: cbe3d0bbea55bc57c99b4bfa25b5fbdf - - path: ./output/index/bowtie2/genome.4.bt2 + - path: ./output/bowtie2/bowtie2/genome.4.bt2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie2/genome.rev.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.1.bt2 md5sum: 52be6950579598a990570fbcf5372184 - - path: ./output/index/bowtie2/genome.rev.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.2.bt2 md5sum: e3b4ef343dea4dd571642010a7d09597 diff --git a/tests/modules/bowtie2/build_test/main.nf b/tests/modules/bowtie2/build_test/main.nf index 2b41fab2..f1d35083 100644 --- a/tests/modules/bowtie2/build_test/main.nf +++ b/tests/modules/bowtie2/build_test/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [publish_dir:'bowtie2'] ) +include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' workflow test_bowtie2_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bowtie2/build_test/nextflow.config b/tests/modules/bowtie2/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie2/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie2/build_test/test.yml b/tests/modules/bowtie2/build_test/test.yml index 3fd049b9..88e6c3ad 100644 --- a/tests/modules/bowtie2/build_test/test.yml +++ b/tests/modules/bowtie2/build_test/test.yml @@ -1,5 +1,5 @@ - name: bowtie2 build - command: nextflow run ./tests/modules/bowtie2/build_test -entry test_bowtie2_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/build_test -entry test_bowtie2_build -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/build/nextflow.config tags: - bowtie2 - bowtie2/build diff --git a/tests/modules/bwa/aln/main.nf b/tests/modules/bwa/aln/main.nf index feb7473d..909e7a2d 100644 --- a/tests/modules/bwa/aln/main.nf +++ b/tests/modules/bwa/aln/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( 
options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' // // Test with single-end data // workflow test_bwa_aln_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwa_aln_single_end { // Test with paired-end data // workflow test_bwa_aln_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) diff --git a/tests/modules/bwa/aln/nextflow.config b/tests/modules/bwa/aln/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/aln/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/aln/test.yml b/tests/modules/bwa/aln/test.yml index 08848143..c89c47be 100644 --- a/tests/modules/bwa/aln/test.yml +++ b/tests/modules/bwa/aln/test.yml @@ -1,24 +1,24 @@ - name: bwa aln single-end - command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/aln/nextflow.config tags: - bwa - bwa/aln files: - path: ./output/bwa/test.sai md5sum: aaaf39b6814c96ca1a5eacc662adf926 - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - name: bwa aln paired-end - command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/aln/nextflow.config tags: - bwa - bwa/aln @@ -27,13 +27,13 @@ md5sum: aaaf39b6814c96ca1a5eacc662adf926 - path: ./output/bwa/test.2.sai md5sum: b4f185d9b4cb256dd5c377070a536124 - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: 
./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git a/tests/modules/bwa/index/main.nf b/tests/modules/bwa/index/main.nf index 30d31202..fe040cb2 100644 --- a/tests/modules/bwa/index/main.nf +++ b/tests/modules/bwa/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [publish_dir:'bwa'] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' workflow test_bwa_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwa/index/nextflow.config b/tests/modules/bwa/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/index/test.yml b/tests/modules/bwa/index/test.yml index 3fe8663d..a9dab91d 100644 --- a/tests/modules/bwa/index/test.yml +++ b/tests/modules/bwa/index/test.yml @@ -1,5 +1,5 @@ - name: bwa index test_bwa_index - command: nextflow run tests/modules/bwa/index -entry test_bwa_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/index -entry test_bwa_index -c ./tests/config/nextflow.config -c ./tests/modules/bwa/index/nextflow.config tags: - bwa - bwa/index diff --git a/tests/modules/bwa/mem/main.nf b/tests/modules/bwa/mem/main.nf index bac51d23..117cbb4d 100644 --- a/tests/modules/bwa/mem/main.nf +++ b/tests/modules/bwa/mem/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_MEM } from '../../../../modules/bwa/mem/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_MEM } from '../../../../modules/bwa/mem/main.nf' // // Test with single-end data // workflow test_bwa_mem_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwa_mem_single_end { // Test with paired-end data // workflow test_bwa_mem_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], 
checkIfExists: true) BWA_INDEX ( fasta ) diff --git a/tests/modules/bwa/mem/nextflow.config b/tests/modules/bwa/mem/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/mem/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/mem/test.yml b/tests/modules/bwa/mem/test.yml index df1988b5..93535043 100644 --- a/tests/modules/bwa/mem/test.yml +++ b/tests/modules/bwa/mem/test.yml @@ -1,35 +1,35 @@ - name: bwa mem single-end - command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config tags: - bwa - bwa/mem files: - path: ./output/bwa/test.bam - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - name: bwa mem paired-end - command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config tags: - bwa - bwa/mem files: - path: ./output/bwa/test.bam - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git a/tests/modules/bwa/sampe/main.nf b/tests/modules/bwa/sampe/main.nf index 017f27e5..abd25566 100644 --- a/tests/modules/bwa/sampe/main.nf +++ b/tests/modules/bwa/sampe/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) -include { BWA_SAMPE } from '../../../../modules/bwa/sampe/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' +include { BWA_SAMPE } from '../../../../modules/bwa/sampe/main.nf' workflow test_bwa_sampe { diff --git a/tests/modules/bwa/sampe/nextflow.config b/tests/modules/bwa/sampe/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/sampe/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/bwa/sampe/test.yml b/tests/modules/bwa/sampe/test.yml index ba5e704d..fb6d7708 100644 --- a/tests/modules/bwa/sampe/test.yml +++ b/tests/modules/bwa/sampe/test.yml @@ -1,5 +1,5 @@ - name: bwa sampe - command: nextflow run ./tests/modules/bwa/sampe -entry test_bwa_sampe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/sampe -entry test_bwa_sampe -c ./tests/config/nextflow.config -c ./tests/modules/bwa/sampe/nextflow.config tags: - bwa - bwa/sampe diff --git a/tests/modules/bwa/samse/main.nf b/tests/modules/bwa/samse/main.nf index 87a7c7b1..17912c36 100644 --- a/tests/modules/bwa/samse/main.nf +++ b/tests/modules/bwa/samse/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) -include { BWA_SAMSE } from '../../../../modules/bwa/samse/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' +include { BWA_SAMSE } from '../../../../modules/bwa/samse/main.nf' workflow test_bwa_samse { diff --git a/tests/modules/bwa/samse/nextflow.config b/tests/modules/bwa/samse/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/samse/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/samse/test.yml b/tests/modules/bwa/samse/test.yml index 597844d4..5a2fe1e3 100644 --- a/tests/modules/bwa/samse/test.yml +++ b/tests/modules/bwa/samse/test.yml @@ -1,5 +1,5 @@ - name: bwa samse - command: nextflow run ./tests/modules/bwa/samse -entry test_bwa_samse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/samse -entry test_bwa_samse -c ./tests/config/nextflow.config -c ./tests/modules/bwa/samse/nextflow.config tags: - bwa - bwa/samse diff --git a/tests/modules/bwamem2/index/main.nf b/tests/modules/bwamem2/index/main.nf index bb7d0803..fe88f8f7 100644 --- a/tests/modules/bwamem2/index/main.nf +++ b/tests/modules/bwamem2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' addParams( options: [publish_dir:'bwamem2'] ) +include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' workflow test_bwamem2_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwamem2/index/nextflow.config b/tests/modules/bwamem2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwamem2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwamem2/index/test.yml b/tests/modules/bwamem2/index/test.yml index d9d15c53..efada6ec 100644 --- a/tests/modules/bwamem2/index/test.yml +++ b/tests/modules/bwamem2/index/test.yml @@ -1,5 +1,5 @@ - name: bwamem2 index - command: nextflow run ./tests/modules/bwamem2/index -entry test_bwamem2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/index -entry test_bwamem2_index -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/index/nextflow.config tags: - bwamem2 - bwamem2/index diff --git 
a/tests/modules/bwamem2/mem/main.nf b/tests/modules/bwamem2/mem/main.nf index 5abda8bb..2ab557e6 100644 --- a/tests/modules/bwamem2/mem/main.nf +++ b/tests/modules/bwamem2/mem/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' addParams( options: [:] ) -include { BWAMEM2_MEM } from '../../../../modules/bwamem2/mem/main.nf' addParams( options: [:] ) +include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' +include { BWAMEM2_MEM } from '../../../../modules/bwamem2/mem/main.nf' // // Test with single-end data // workflow test_bwamem2_mem_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwamem2_mem_single_end { // Test with paired-end data // workflow test_bwamem2_mem_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) diff --git a/tests/modules/bwamem2/mem/nextflow.config b/tests/modules/bwamem2/mem/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwamem2/mem/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwamem2/mem/test.yml b/tests/modules/bwamem2/mem/test.yml index cc2fe2a8..c1724bc0 100644 --- a/tests/modules/bwamem2/mem/test.yml +++ b/tests/modules/bwamem2/mem/test.yml @@ -1,35 +1,35 @@ - name: bwamem2 mem single-end - command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config tags: - bwamem2 - bwamem2/mem files: - path: ./output/bwamem2/test.bam - - path: ./output/index/bwamem2/genome.fasta.amb + - path: ./output/bwamem2/bwamem2/genome.fasta.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwamem2/genome.fasta.pac + - path: ./output/bwamem2/bwamem2/genome.fasta.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwamem2/genome.fasta.0123 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 md5sum: b02870de80106104abcb03cd9463e7d8 - - path: ./output/index/bwamem2/genome.fasta.bwt.2bit.64 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 md5sum: d097a1b82dee375d41a1ea69895a9216 - - path: ./output/index/bwamem2/genome.fasta.ann + - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - name: bwamem2 mem paired-end - command: nextflow run 
./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config tags: - bwamem2 - bwamem2/mem files: - path: ./output/bwamem2/test.bam - - path: ./output/index/bwamem2/genome.fasta.amb + - path: ./output/bwamem2/bwamem2/genome.fasta.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwamem2/genome.fasta.pac + - path: ./output/bwamem2/bwamem2/genome.fasta.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwamem2/genome.fasta.0123 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 md5sum: b02870de80106104abcb03cd9463e7d8 - - path: ./output/index/bwamem2/genome.fasta.bwt.2bit.64 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 md5sum: d097a1b82dee375d41a1ea69895a9216 - - path: ./output/index/bwamem2/genome.fasta.ann + - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 diff --git a/tests/modules/bwameth/align/main.nf b/tests/modules/bwameth/align/main.nf index fb8cad6a..8066941c 100644 --- a/tests/modules/bwameth/align/main.nf +++ b/tests/modules/bwameth/align/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [:] ) -include { BWAMETH_ALIGN } from '../../../../modules/bwameth/align/main.nf' addParams( options: [:] ) +include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' +include { BWAMETH_ALIGN } from '../../../../modules/bwameth/align/main.nf' // // Test with single-end data // workflow test_bwameth_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMETH_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwameth_align_single_end { // Test with paired-end data // workflow test_bwameth_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_methylated_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_methylated_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMETH_INDEX ( fasta ) diff --git a/tests/modules/bwameth/align/nextflow.config b/tests/modules/bwameth/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwameth/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwameth/align/test.yml b/tests/modules/bwameth/align/test.yml index 5cf4b84d..f921b5f4 100644 --- a/tests/modules/bwameth/align/test.yml +++ b/tests/modules/bwameth/align/test.yml @@ -1,5 +1,5 @@ - 
name: bwameth align single-end test workflow - command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/align/nextflow.config tags: - bwameth - bwameth/align @@ -7,7 +7,7 @@ - path: output/bwameth/test.bam - name: bwameth align paired-end test workflow - command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/align/nextflow.config tags: - bwameth - bwameth/align diff --git a/tests/modules/bwameth/index/main.nf b/tests/modules/bwameth/index/main.nf index 46662201..b70fd1f7 100644 --- a/tests/modules/bwameth/index/main.nf +++ b/tests/modules/bwameth/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [publish_dir:'bwameth'] ) +include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' workflow test_bwameth_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwameth/index/nextflow.config b/tests/modules/bwameth/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwameth/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwameth/index/test.yml b/tests/modules/bwameth/index/test.yml index 0cc7922e..9783c511 100644 --- a/tests/modules/bwameth/index/test.yml +++ b/tests/modules/bwameth/index/test.yml @@ -1,5 +1,5 @@ - name: bwameth index test workflow - command: nextflow run ./tests/modules/bwameth/index -entry test_bwameth_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/index -entry test_bwameth_index -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/index/nextflow.config tags: - bwameth - bwameth/index diff --git a/tests/modules/cat/cat/main.nf b/tests/modules/cat/cat/main.nf index a110a8ab..430c71fa 100644 --- a/tests/modules/cat/cat/main.nf +++ b/tests/modules/cat/cat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' addParams( options: [:] ) +include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' workflow test_cat_unzipped_unzipped { diff --git a/tests/modules/cat/cat/nextflow.config b/tests/modules/cat/cat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cat/cat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cat/cat/test.yml b/tests/modules/cat/cat/test.yml index 2f234a01..d7973042 100644 --- a/tests/modules/cat/cat/test.yml +++ b/tests/modules/cat/cat/test.yml @@ -1,5 +1,5 @@ - name: cat unzipped unzipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_unzipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -8,7 +8,7 
@@ md5sum: f44b33a0e441ad58b2d3700270e2dbe2 - name: cat zipped zipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -16,7 +16,7 @@ - path: output/cat/cat.txt.gz - name: cat zipped unzipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -25,7 +25,7 @@ md5sum: c439d3b60e7bc03e8802a451a0d9a5d9 - name: cat unzipped zipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat diff --git a/tests/modules/cat/fastq/main.nf b/tests/modules/cat/fastq/main.nf index 027bd108..c3da91d2 100644 --- a/tests/modules/cat/fastq/main.nf +++ b/tests/modules/cat/fastq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CAT_FASTQ } from '../../../../modules/cat/fastq/main.nf' addParams( options: [publish_dir:'cat'] ) +include { CAT_FASTQ } from '../../../../modules/cat/fastq/main.nf' workflow test_cat_fastq_single_end { input = [ diff --git a/tests/modules/cat/fastq/nextflow.config b/tests/modules/cat/fastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cat/fastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cat/fastq/test.yml b/tests/modules/cat/fastq/test.yml index 9a5af25c..89ddf331 100644 --- a/tests/modules/cat/fastq/test.yml +++ b/tests/modules/cat/fastq/test.yml @@ -1,5 +1,5 @@ - name: cat fastq single-end - command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config tags: - cat - cat/fastq @@ -8,7 +8,7 @@ md5sum: 59f6dbe193741bb40f498f254aeb2e99 - name: cat fastq fastqc_paired_end - command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config tags: - cat - cat/fastq diff --git a/tests/modules/cellranger/mkref/main.nf b/tests/modules/cellranger/mkref/main.nf index b20a68db..ad98ed1a 100644 --- a/tests/modules/cellranger/mkref/main.nf +++ b/tests/modules/cellranger/mkref/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' addParams( options: [:] ) +include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' workflow test_cellranger_mkref { diff --git a/tests/modules/cellranger/mkref/nextflow.config b/tests/modules/cellranger/mkref/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cellranger/mkref/nextflow.config @@ -0,0 +1,5 @@ +process { + + 
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cellranger/mkref/test.yml b/tests/modules/cellranger/mkref/test.yml index 5e60819e..eb01e9e2 100644 --- a/tests/modules/cellranger/mkref/test.yml +++ b/tests/modules/cellranger/mkref/test.yml @@ -1,5 +1,5 @@ - name: cellranger mkref test_cellranger_mkref - command: nextflow run tests/modules/cellranger/mkref -entry test_cellranger_mkref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cellranger/mkref -entry test_cellranger_mkref -c ./tests/config/nextflow.config -c ./tests/modules/cellranger/mkref/nextflow.config tags: - cellranger - cellranger/mkref diff --git a/tests/modules/checkm/lineagewf/main.nf b/tests/modules/checkm/lineagewf/main.nf index 94309896..e914774c 100644 --- a/tests/modules/checkm/lineagewf/main.nf +++ b/tests/modules/checkm/lineagewf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CHECKM_LINEAGEWF } from '../../../../modules/checkm/lineagewf/main.nf' addParams( options: [:] ) +include { CHECKM_LINEAGEWF } from '../../../../modules/checkm/lineagewf/main.nf' workflow test_checkm_lineagewf { diff --git a/tests/modules/checkm/lineagewf/nextflow.config b/tests/modules/checkm/lineagewf/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/checkm/lineagewf/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/checkm/lineagewf/test.yml b/tests/modules/checkm/lineagewf/test.yml index 768601b0..6749f6aa 100644 --- a/tests/modules/checkm/lineagewf/test.yml +++ b/tests/modules/checkm/lineagewf/test.yml @@ -1,5 +1,5 @@ - name: checkm lineagewf - command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf -c ./tests/config/nextflow.config -c ./tests/modules/checkm/lineagewf/nextflow.config tags: - checkm - checkm/lineagewf @@ -16,7 +16,7 @@ - "UID1" - name: checkm lineagewf_multi - command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf_multi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf_multi -c ./tests/config/nextflow.config -c ./tests/modules/checkm/lineagewf/nextflow.config tags: - checkm - checkm/lineagewf diff --git a/tests/modules/chromap/chromap/main.nf b/tests/modules/chromap/chromap/main.nf index a5a1fc86..5522f2b5 100644 --- a/tests/modules/chromap/chromap/main.nf +++ b/tests/modules/chromap/chromap/main.nf @@ -2,19 +2,20 @@ nextflow.enable.dsl = 2 -include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' addParams( options: [:] ) -include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_BASE } from '../../../../modules/chromap/chromap/main.nf' addParams( options: [:] ) -include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_SAM } from '../../../../modules/chromap/chromap/main.nf' addParams( options: ['args': '--SAM'] ) +include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' +include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_BASE } from '../../../../modules/chromap/chromap/main.nf' +include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_SAM } from '../../../../modules/chromap/chromap/main.nf' workflow test_chromap_chromap_single_end { // Test single-end and gz compressed output - - fasta = 
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_BASE ( @@ -31,8 +32,6 @@ workflow test_chromap_chromap_single_end { workflow test_chromap_chromap_paired_end { // Test paired-end and gz compressed output - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:false ], // meta map [ @@ -40,6 +39,7 @@ workflow test_chromap_chromap_paired_end { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_BASE ( @@ -56,8 +56,6 @@ workflow test_chromap_chromap_paired_end { workflow test_chromap_chromap_paired_bam { // Test paired-end and bam output - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:false ], // meta map [ @@ -65,6 +63,7 @@ workflow test_chromap_chromap_paired_bam { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_SAM ( diff --git a/tests/modules/chromap/chromap/nextflow.config b/tests/modules/chromap/chromap/nextflow.config new file mode 100644 index 00000000..1e979bb9 --- /dev/null +++ b/tests/modules/chromap/chromap/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CHROMAP_CHROMAP_SAM { + ext.args = '--SAM' + } + +} diff --git a/tests/modules/chromap/chromap/test.yml b/tests/modules/chromap/chromap/test.yml index b2ce8137..20a51e2b 100644 --- a/tests/modules/chromap/chromap/test.yml +++ b/tests/modules/chromap/chromap/test.yml @@ -1,5 +1,5 @@ - name: chromap chromap test_chromap_chromap_single_end - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_single_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config tags: - chromap/chromap - chromap @@ -10,7 +10,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: chromap chromap test_chromap_chromap_paired_end - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config tags: - chromap/chromap - chromap @@ -21,7 +21,7 @@ md5sum: cafd8fb21977f5ae69e9008b220ab169 - name: chromap chromap test_chromap_chromap_paired_bam - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config 
tags: - chromap/chromap - chromap diff --git a/tests/modules/chromap/index/main.nf b/tests/modules/chromap/index/main.nf index 997baba1..18b42006 100644 --- a/tests/modules/chromap/index/main.nf +++ b/tests/modules/chromap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' addParams( options: [:] ) +include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' workflow test_chromap_index { diff --git a/tests/modules/chromap/index/nextflow.config b/tests/modules/chromap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/chromap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/chromap/index/test.yml b/tests/modules/chromap/index/test.yml index 0a99a3a0..74cfadfc 100644 --- a/tests/modules/chromap/index/test.yml +++ b/tests/modules/chromap/index/test.yml @@ -1,5 +1,5 @@ - name: chromap index test_chromap_index - command: nextflow run tests/modules/chromap/index -entry test_chromap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/index -entry test_chromap_index -c ./tests/config/nextflow.config -c ./tests/modules/chromap/index/nextflow.config tags: - chromap/index - chromap diff --git a/tests/modules/clonalframeml/main.nf b/tests/modules/clonalframeml/main.nf index 35ecaa79..73773113 100644 --- a/tests/modules/clonalframeml/main.nf +++ b/tests/modules/clonalframeml/main.nf @@ -2,13 +2,15 @@ nextflow.enable.dsl = 2 -include { CLONALFRAMEML } from '../../../modules/clonalframeml/main.nf' addParams( options: [:] ) +include { CLONALFRAMEML } from '../../../modules/clonalframeml/main.nf' workflow test_clonalframeml { - - input = [ [ id:'test' ], // meta map - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/genome_msa.newick", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/genome_msa.fa.gz", checkIfExists: true),] + + input = [ + [ id:'test' ], // meta map + file(params.test_data['haemophilus_influenzae']['genome']['genome_aln_nwk'], checkIfExists: true), + file(params.test_data['haemophilus_influenzae']['genome']['genome_aln_gz'], checkIfExists: true) + ] CLONALFRAMEML ( input ) } diff --git a/tests/modules/clonalframeml/nextflow.config b/tests/modules/clonalframeml/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/clonalframeml/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/clonalframeml/test.yml b/tests/modules/clonalframeml/test.yml index f2b68115..8ea11d16 100644 --- a/tests/modules/clonalframeml/test.yml +++ b/tests/modules/clonalframeml/test.yml @@ -1,5 +1,5 @@ - name: clonalframeml test_clonalframeml - command: nextflow run tests/modules/clonalframeml -entry test_clonalframeml -c tests/config/nextflow.config + command: nextflow run ./tests/modules/clonalframeml -entry test_clonalframeml -c ./tests/config/nextflow.config -c ./tests/modules/clonalframeml/nextflow.config tags: - clonalframeml files: diff --git a/tests/modules/cmseq/polymut/main.nf b/tests/modules/cmseq/polymut/main.nf index 729ed38f..df6a0ac1 100644 --- a/tests/modules/cmseq/polymut/main.nf +++ 
b/tests/modules/cmseq/polymut/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CMSEQ_POLYMUT } from '../../../../modules/cmseq/polymut/main.nf' addParams( options: [:] ) +include { CMSEQ_POLYMUT } from '../../../../modules/cmseq/polymut/main.nf' workflow test_cmseq_polymut_1 { diff --git a/tests/modules/cmseq/polymut/nextflow.config b/tests/modules/cmseq/polymut/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cmseq/polymut/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cmseq/polymut/test.yml b/tests/modules/cmseq/polymut/test.yml index 2a989cb9..05887fa8 100644 --- a/tests/modules/cmseq/polymut/test.yml +++ b/tests/modules/cmseq/polymut/test.yml @@ -1,5 +1,5 @@ - name: cmseq polymut test_cmseq_polymut_1 - command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_1 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_1 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config tags: - cmseq/polymut - cmseq @@ -8,7 +8,7 @@ md5sum: fd325c1724ee23d132a9115c64494efc - name: cmseq polymut test_cmseq_polymut_2 - command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_2 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config tags: - cmseq/polymut - cmseq @@ -17,7 +17,7 @@ md5sum: fd325c1724ee23d132a9115c64494efc - name: cmseq polymut test_cmseq_polymut_3 - command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_3 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_3 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config tags: - cmseq/polymut - cmseq diff --git a/tests/modules/cnvkit/batch/main.nf b/tests/modules/cnvkit/batch/main.nf index 5d92afaa..6b40dec6 100755 --- a/tests/modules/cnvkit/batch/main.nf +++ b/tests/modules/cnvkit/batch/main.nf @@ -2,61 +2,54 @@ nextflow.enable.dsl = 2 -include { CNVKIT_BATCH as CNVKIT_HYBRID } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn' ] ) -include { CNVKIT_BATCH as CNVKIT_WGS } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn --method wgs' ] ) -include { CNVKIT_BATCH as CNVKIT_TUMORONLY } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--method wgs' ] ) - +include { CNVKIT_BATCH as CNVKIT_HYBRID } from '../../../../modules/cnvkit/batch/main.nf' +include { CNVKIT_BATCH as CNVKIT_WGS } from '../../../../modules/cnvkit/batch/main.nf' +include { CNVKIT_BATCH as CNVKIT_TUMORONLY } from '../../../../modules/cnvkit/batch/main.nf' workflow test_cnvkit_hybrid { - tumor = file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - normal = file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) - input = [ [ id:'test' ], // meta map - tumor, - normal - ] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) + input = [ + [ id:'test' ], // meta map + 
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) CNVKIT_HYBRID ( input, fasta, targets, [] ) } workflow test_cnvkit_wgs { - tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) - normal = file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - input = [ [ id:'test'], // meta map - tumor, - normal - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) CNVKIT_WGS ( input, fasta, [], [] ) } - workflow test_cnvkit_cram { - tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) - normal = file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - input = [ [ id:'test'], // meta map - tumor, - normal - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) CNVKIT_WGS ( input, fasta, [], [] ) } - - workflow test_cnvkit_tumoronly { - tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) - input = [ [ id:'test'], // meta map - tumor, - [ ] - ] + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + [] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) reference = file(params.test_data['generic']['cnn']['reference'], checkIfExists: true) diff --git a/tests/modules/cnvkit/batch/nextflow.config b/tests/modules/cnvkit/batch/nextflow.config new file mode 100644 index 00000000..b8a8fc3f --- /dev/null +++ b/tests/modules/cnvkit/batch/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CNVKIT_HYBRID { + ext.args = '--output-reference reference.cnn' + } + + withName: CNVKIT_WGS { + ext.args = '--output-reference reference.cnn --method wgs' + } + + withName: CNVKIT_TUMORONLY { + ext.args = '--method wgs' + } + +} diff --git a/tests/modules/cnvkit/batch/test.yml b/tests/modules/cnvkit/batch/test.yml index 96ea670c..57af3603 100755 --- a/tests/modules/cnvkit/batch/test.yml +++ b/tests/modules/cnvkit/batch/test.yml @@ -1,5 +1,5 @@ - name: cnvkit batch test_cnvkit_hybrid - command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_hybrid -c tests/config/nextflow.config + command: nextflow run 
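Where one module is included several times with different arguments (the CNVKIT_HYBRID/WGS/TUMORONLY trio here, or the CHROMAP_CHROMAP and COOLER_CLOAD variants elsewhere in this series), the per-include addParams calls are replaced by withName selectors that target the include alias. A minimal, self-contained sketch of that mechanism, with a made-up GREET module and file layout purely for illustration:

// modules/greet/main.nf (hypothetical module)
process GREET {
    script:
    def args = task.ext.args ?: ''
    """
    echo "greeting with: $args"
    """
}

// tests/modules/greet/main.nf (hypothetical test workflow)
nextflow.enable.dsl = 2

include { GREET as GREET_SHORT } from '../../../modules/greet/main.nf'
include { GREET as GREET_LONG  } from '../../../modules/greet/main.nf'

workflow test_greet {
    GREET_SHORT ()
    GREET_LONG ()
}

// tests/modules/greet/nextflow.config (hypothetical)
process {
    withName: GREET_SHORT {
        ext.args = '--short'
    }
    withName: GREET_LONG {
        ext.args = '--long'
    }
}

Because a withName selector matches the name a process runs under, each alias picks up its own ext.args without any change to the module file itself.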
./tests/modules/cnvkit/batch -entry test_cnvkit_hybrid -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config tags: - cnvkit/batch - cnvkit @@ -28,7 +28,7 @@ md5sum: aa8a018b1d4d1e688c9f9f6ae01bf4d7 - name: cnvkit batch test_cnvkit_wgs - command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_wgs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_wgs -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config tags: - cnvkit/batch - cnvkit @@ -59,7 +59,7 @@ md5sum: 6ae6b3fce7299eedca6133d911c38fe1 - name: cnvkit batch test_cnvkit_cram - command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_cram -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config tags: - cnvkit/batch - cnvkit @@ -90,7 +90,7 @@ md5sum: 6ae6b3fce7299eedca6133d911c38fe1 - name: cnvkit batch test_cnvkit_tumoronly - command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_tumoronly -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_tumoronly -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config tags: - cnvkit/batch - cnvkit diff --git a/tests/modules/cooler/cload/main.nf b/tests/modules/cooler/cload/main.nf index dd9b3e98..170b7e11 100644 --- a/tests/modules/cooler/cload/main.nf +++ b/tests/modules/cooler/cload/main.nf @@ -2,12 +2,12 @@ nextflow.enable.dsl = 2 -include { COOLER_CLOAD } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'pairix'] ) -include { COOLER_CLOAD as COOLER_CLOAD_PAIRS } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'pairs --chrom1 1 --pos1 2 --chrom2 4 --pos2 5 -N'] ) -include { COOLER_CLOAD as COOLER_CLOAD_TABIX } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'tabix'] ) -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) -include { COOLER_DUMP as COOLER_DUMP_PAIRS} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) -include { COOLER_DUMP as COOLER_DUMP_TABIX} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_CLOAD } from '../../../../modules/cooler/cload/main.nf' +include { COOLER_CLOAD as COOLER_CLOAD_PAIRS } from '../../../../modules/cooler/cload/main.nf' +include { COOLER_CLOAD as COOLER_CLOAD_TABIX } from '../../../../modules/cooler/cload/main.nf' +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' +include { COOLER_DUMP as COOLER_DUMP_PAIRS} from '../../../../modules/cooler/dump/main.nf' +include { COOLER_DUMP as COOLER_DUMP_TABIX} from '../../../../modules/cooler/dump/main.nf' workflow test_cooler_cload_pairix { diff --git a/tests/modules/cooler/cload/nextflow.config b/tests/modules/cooler/cload/nextflow.config new file mode 100644 index 00000000..610a5425 --- /dev/null +++ b/tests/modules/cooler/cload/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: COOLER_CLOAD { + ext.args = 'pairix' + } + + withName: COOLER_CLOAD_PAIRS { + ext.args = 'pairs --chrom1 1 --pos1 2 --chrom2 4 --pos2 5 -N' + } + + withName: COOLER_CLOAD_TABIX { + ext.args = 'tabix' + } + +} diff --git a/tests/modules/cooler/cload/test.yml 
b/tests/modules/cooler/cload/test.yml index 7cb9a0bd..f99f4624 100644 --- a/tests/modules/cooler/cload/test.yml +++ b/tests/modules/cooler/cload/test.yml @@ -1,5 +1,5 @@ - name: cooler cload test_cooler_cload_pairix - command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_pairix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_pairix -c ./tests/config/nextflow.config -c ./tests/modules/cooler/cload/nextflow.config tags: - cooler/cload - cooler @@ -9,7 +9,7 @@ md5sum: 0cd85311089669688ec17468eae02111 - name: cooler cload test_cooler_cload_pairs - command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_pairs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_pairs -c ./tests/config/nextflow.config -c ./tests/modules/cooler/cload/nextflow.config tags: - cooler/cload - cooler @@ -19,7 +19,7 @@ md5sum: 7f832733fc7853ebb1937b33e4c1e0de - name: cooler cload test_cooler_cload_tabix - command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_tabix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_tabix -c ./tests/config/nextflow.config -c ./tests/modules/cooler/cload/nextflow.config tags: - cooler/cload - cooler diff --git a/tests/modules/cooler/digest/main.nf b/tests/modules/cooler/digest/main.nf index 817c9081..4dfa25be 100644 --- a/tests/modules/cooler/digest/main.nf +++ b/tests/modules/cooler/digest/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { COOLER_DIGEST } from '../../../../modules/cooler/digest/main.nf' addParams( options: [:] ) +include { COOLER_DIGEST } from '../../../../modules/cooler/digest/main.nf' workflow test_cooler_digest { diff --git a/tests/modules/cooler/digest/nextflow.config b/tests/modules/cooler/digest/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/digest/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/digest/test.yml b/tests/modules/cooler/digest/test.yml index b594a232..80430ed7 100644 --- a/tests/modules/cooler/digest/test.yml +++ b/tests/modules/cooler/digest/test.yml @@ -1,5 +1,5 @@ - name: cooler digest test_cooler_digest - command: nextflow run tests/modules/cooler/digest -entry test_cooler_digest -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/digest -entry test_cooler_digest -c ./tests/config/nextflow.config -c ./tests/modules/cooler/digest/nextflow.config tags: - cooler/digest - cooler diff --git a/tests/modules/cooler/dump/main.nf b/tests/modules/cooler/dump/main.nf index deeeb21f..d80ee0d7 100644 --- a/tests/modules/cooler/dump/main.nf +++ b/tests/modules/cooler/dump/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' workflow test_cooler_dump { diff --git a/tests/modules/cooler/dump/nextflow.config b/tests/modules/cooler/dump/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/dump/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/dump/test.yml 
b/tests/modules/cooler/dump/test.yml index ccfc5f47..6f81c7a9 100644 --- a/tests/modules/cooler/dump/test.yml +++ b/tests/modules/cooler/dump/test.yml @@ -1,5 +1,5 @@ - name: cooler dump test_cooler_dump - command: nextflow run tests/modules/cooler/dump -entry test_cooler_dump -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/dump -entry test_cooler_dump -c ./tests/config/nextflow.config -c ./tests/modules/cooler/dump/nextflow.config tags: - cooler/dump - cooler diff --git a/tests/modules/cooler/merge/main.nf b/tests/modules/cooler/merge/main.nf index 564660c5..81336984 100644 --- a/tests/modules/cooler/merge/main.nf +++ b/tests/modules/cooler/merge/main.nf @@ -2,15 +2,19 @@ nextflow.enable.dsl = 2 -include { COOLER_MERGE } from '../../../../modules/cooler/merge/main.nf' addParams( options: [publish_files:[:]] ) -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_MERGE } from '../../../../modules/cooler/merge/main.nf' +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' workflow test_cooler_merge { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true), - file(params.test_data['generic']['cooler']['test_merge_cool_cp2'], checkIfExists: true)] - ] + input = [ + [ id:'test' ], // meta map + [ + file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_merge_cool_cp2'], checkIfExists: true) + ] + ] - COOLER_MERGE ( input ).cool | COOLER_DUMP + COOLER_MERGE ( input ) + COOLER_DUMP ( COOLER_MERGE.out.cool, "" ) } diff --git a/tests/modules/cooler/merge/nextflow.config b/tests/modules/cooler/merge/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/merge/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/merge/test.yml b/tests/modules/cooler/merge/test.yml index 3ac388e7..c884ba5e 100644 --- a/tests/modules/cooler/merge/test.yml +++ b/tests/modules/cooler/merge/test.yml @@ -1,5 +1,5 @@ - name: cooler merge test_cooler_merge - command: nextflow run tests/modules/cooler/merge -entry test_cooler_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/merge -entry test_cooler_merge -c ./tests/config/nextflow.config -c ./tests/modules/cooler/merge/nextflow.config tags: - cooler/merge - cooler diff --git a/tests/modules/cooler/zoomify/main.nf b/tests/modules/cooler/zoomify/main.nf index 72c33983..42edadb8 100644 --- a/tests/modules/cooler/zoomify/main.nf +++ b/tests/modules/cooler/zoomify/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { COOLER_ZOOMIFY } from '../../../../modules/cooler/zoomify/main.nf' addParams( options: ['args':'-r 2,4,8', publish_files:[:]] ) -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_ZOOMIFY } from '../../../../modules/cooler/zoomify/main.nf' workflow test_cooler_zoomify { - input = [ [ id:'test' ], // meta map - file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true)] + + input = [ + [ id:'test' ], // meta map + file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true) + ] COOLER_ZOOMIFY ( input ) - COOLER_DUMP(COOLER_ZOOMIFY.out.mcool, "/resolutions/2") } diff --git 
a/tests/modules/cooler/zoomify/nextflow.config b/tests/modules/cooler/zoomify/nextflow.config new file mode 100644 index 00000000..d4c3503f --- /dev/null +++ b/tests/modules/cooler/zoomify/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: COOLER_ZOOMIFY { + ext.args = '-r 2,4,8' + } + +} diff --git a/tests/modules/cooler/zoomify/test.yml b/tests/modules/cooler/zoomify/test.yml index 79a5af2c..3afdb8a6 100644 --- a/tests/modules/cooler/zoomify/test.yml +++ b/tests/modules/cooler/zoomify/test.yml @@ -1,8 +1,8 @@ - name: cooler zoomify test_cooler_zoomify - command: nextflow run tests/modules/cooler/zoomify -entry test_cooler_zoomify -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/zoomify -entry test_cooler_zoomify -c ./tests/config/nextflow.config -c ./tests/modules/cooler/zoomify/nextflow.config tags: - cooler - cooler/zoomify files: - path: output/cooler/test.bedpe - md5sum: 8d792beb609fff62b536c326661f9507 + md5sum: 0ce5e715bfc4674cdda02f2d7e7e3170 diff --git a/tests/modules/csvtk/concat/main.nf b/tests/modules/csvtk/concat/main.nf index 22b0205f..aee31679 100644 --- a/tests/modules/csvtk/concat/main.nf +++ b/tests/modules/csvtk/concat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CSVTK_CONCAT } from '../../../../modules/csvtk/concat/main.nf' addParams( options: [:] ) +include { CSVTK_CONCAT } from '../../../../modules/csvtk/concat/main.nf' workflow test_csvtk_concat { diff --git a/tests/modules/csvtk/concat/nextflow.config b/tests/modules/csvtk/concat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/csvtk/concat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/csvtk/concat/test.yml b/tests/modules/csvtk/concat/test.yml index 0fe9c604..11a2af67 100644 --- a/tests/modules/csvtk/concat/test.yml +++ b/tests/modules/csvtk/concat/test.yml @@ -1,5 +1,5 @@ - name: csvtk concat - command: nextflow run ./tests/modules/csvtk/concat -entry test_csvtk_concat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/csvtk/concat -entry test_csvtk_concat -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/concat/nextflow.config tags: - csvtk - csvtk/concat diff --git a/tests/modules/csvtk/split/main.nf b/tests/modules/csvtk/split/main.nf index 8dfd4053..31d24d61 100644 --- a/tests/modules/csvtk/split/main.nf +++ b/tests/modules/csvtk/split/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CSVTK_SPLIT } from '../../../../modules/csvtk/split/main.nf' addParams( options: [args: "-C '&' --fields 'first_name' "]) +include { CSVTK_SPLIT } from '../../../../modules/csvtk/split/main.nf' workflow test_csvtk_split_tsv { diff --git a/tests/modules/csvtk/split/nextflow.config b/tests/modules/csvtk/split/nextflow.config new file mode 100644 index 00000000..1dbd7615 --- /dev/null +++ b/tests/modules/csvtk/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CSVTK_SPLIT { + ext.args = "-C \'&\' --fields \'first_name\' " + } + +} diff --git a/tests/modules/csvtk/split/test.yml b/tests/modules/csvtk/split/test.yml index ade2fe48..bd13cca6 100644 --- a/tests/modules/csvtk/split/test.yml +++ b/tests/modules/csvtk/split/test.yml @@ -1,5 +1,5 @@ - name: 
csvtk split test_csvtk_split_tsv - command: nextflow run tests/modules/csvtk/split -entry test_csvtk_split_tsv -c tests/config/nextflow.config + command: nextflow run ./tests/modules/csvtk/split -entry test_csvtk_split_tsv -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/split/nextflow.config tags: - csvtk/split - csvtk @@ -12,7 +12,7 @@ md5sum: 45ae6da8111096746d1736d34220a3ec - name: csvtk split test_csvtk_split_csv - command: nextflow run tests/modules/csvtk/split -entry test_csvtk_split_csv -c tests/config/nextflow.config + command: nextflow run ./tests/modules/csvtk/split -entry test_csvtk_split_csv -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/split/nextflow.config tags: - csvtk/split - csvtk diff --git a/tests/modules/custom/dumpsoftwareversions/main.nf b/tests/modules/custom/dumpsoftwareversions/main.nf index 020b19bd..95a43a82 100644 --- a/tests/modules/custom/dumpsoftwareversions/main.nf +++ b/tests/modules/custom/dumpsoftwareversions/main.nf @@ -2,23 +2,54 @@ nextflow.enable.dsl = 2 -include { FASTQC } from '../../../../modules/fastqc/main.nf' addParams( options: [:] ) -include { MULTIQC } from '../../../../modules/multiqc/main.nf' addParams( options: [:] ) -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../../../modules/custom/dumpsoftwareversions/main.nf' addParams( options: [publish_dir:'custom'] ) +include { FASTQC } from '../../../../modules/fastqc/main.nf' +include { MULTIQC } from '../../../../modules/multiqc/main.nf' +include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../../../modules/custom/dumpsoftwareversions/main.nf' + +workflow fastqc1 { + take: + input + + main: + FASTQC ( input ) + + emit: + versions = FASTQC.out.versions +} + +workflow fastqc2 { + take: + input + + main: + FASTQC ( input ) + + emit: + versions = FASTQC.out.versions + zip = FASTQC.out.zip +} workflow test_custom_dumpsoftwareversions { input = [ [ id: 'test', single_end: false ], - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] ] - FASTQC ( input ) - MULTIQC ( FASTQC.out.zip.collect { it[1] } ) + // Using subworkflows to ensure that the script can properly handle + // cases where subworkflows have a module with the same name. 
+ fastqc1 ( input ) + fastqc2 ( input ) + MULTIQC ( fastqc2.out.zip.collect { it[1] } ) - ch_software_versions = Channel.empty() - ch_software_versions = ch_software_versions.mix(FASTQC.out.versions) - ch_software_versions = ch_software_versions.mix(MULTIQC.out.versions) + fastqc1 + .out + .versions + .mix(fastqc2.out.versions) + .mix(MULTIQC.out.versions) + .set { ch_software_versions } CUSTOM_DUMPSOFTWAREVERSIONS ( ch_software_versions.collectFile() ) } diff --git a/tests/modules/custom/dumpsoftwareversions/nextflow.config b/tests/modules/custom/dumpsoftwareversions/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/custom/dumpsoftwareversions/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/custom/dumpsoftwareversions/test.yml b/tests/modules/custom/dumpsoftwareversions/test.yml index 1815c0ba..363a1218 100644 --- a/tests/modules/custom/dumpsoftwareversions/test.yml +++ b/tests/modules/custom/dumpsoftwareversions/test.yml @@ -1,8 +1,14 @@ - name: custom dumpsoftwareversions - command: nextflow run ./tests/modules/custom/dumpsoftwareversions -entry test_custom_dumpsoftwareversions -c tests/config/nextflow.config + command: nextflow run ./tests/modules/custom/dumpsoftwareversions -entry test_custom_dumpsoftwareversions -c ./tests/config/nextflow.config -c ./tests/modules/custom/dumpsoftwareversions/nextflow.config tags: - custom - custom/dumpsoftwareversions files: - path: output/custom/software_versions.yml + contains: + - FASTQC + - MULTIQC + must_not_contain: + - fastqc1 + - fastqc2 - path: output/custom/software_versions_mqc.yml diff --git a/tests/modules/custom/getchromsizes/main.nf b/tests/modules/custom/getchromsizes/main.nf index 503668ec..b4f9fb9f 100644 --- a/tests/modules/custom/getchromsizes/main.nf +++ b/tests/modules/custom/getchromsizes/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CUSTOM_GETCHROMSIZES } from '../../../../modules/custom/getchromsizes/main.nf' addParams( options: [:] ) +include { CUSTOM_GETCHROMSIZES } from '../../../../modules/custom/getchromsizes/main.nf' workflow test_custom_getchromsizes { diff --git a/tests/modules/custom/getchromsizes/nextflow.config b/tests/modules/custom/getchromsizes/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/custom/getchromsizes/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/custom/getchromsizes/test.yml b/tests/modules/custom/getchromsizes/test.yml index 1265f478..9a770ad4 100644 --- a/tests/modules/custom/getchromsizes/test.yml +++ b/tests/modules/custom/getchromsizes/test.yml @@ -1,5 +1,5 @@ - name: custom getchromsizes - command: nextflow run ./tests/modules/custom/getchromsizes -entry test_custom_getchromsizes -c tests/config/nextflow.config + command: nextflow run ./tests/modules/custom/getchromsizes -entry test_custom_getchromsizes -c ./tests/config/nextflow.config -c ./tests/modules/custom/getchromsizes/nextflow.config tags: - custom - custom/getchromsizes diff --git a/tests/modules/cutadapt/main.nf b/tests/modules/cutadapt/main.nf index 8e060398..a47feebb 100644 --- a/tests/modules/cutadapt/main.nf +++ b/tests/modules/cutadapt/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CUTADAPT } from '../../../modules/cutadapt/main.nf' addParams( options: 
[ args:'-q 25' ] ) +include { CUTADAPT } from '../../../modules/cutadapt/main.nf' // // Test with single-end data diff --git a/tests/modules/cutadapt/nextflow.config b/tests/modules/cutadapt/nextflow.config new file mode 100644 index 00000000..2af532cc --- /dev/null +++ b/tests/modules/cutadapt/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CUTADAPT { + ext.args = '-q 25' + } + +} diff --git a/tests/modules/cutadapt/test.yml b/tests/modules/cutadapt/test.yml index 40710dc5..6fa0eb4f 100644 --- a/tests/modules/cutadapt/test.yml +++ b/tests/modules/cutadapt/test.yml @@ -1,5 +1,5 @@ - name: cutadapt single-end - command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_single_end -c ./tests/config/nextflow.config -c ./tests/modules/cutadapt/nextflow.config tags: - cutadapt files: @@ -7,7 +7,7 @@ - path: ./output/cutadapt/test.trim.fastq.gz - name: cutadapt paired-end - command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/cutadapt/nextflow.config tags: - cutadapt files: diff --git a/tests/modules/damageprofiler/main.nf b/tests/modules/damageprofiler/main.nf index 36ae7b24..9207caf1 100644 --- a/tests/modules/damageprofiler/main.nf +++ b/tests/modules/damageprofiler/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DAMAGEPROFILER } from '../../../modules/damageprofiler/main.nf' addParams( options: [:] ) +include { DAMAGEPROFILER } from '../../../modules/damageprofiler/main.nf' workflow test_damageprofiler { diff --git a/tests/modules/damageprofiler/nextflow.config b/tests/modules/damageprofiler/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/damageprofiler/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/damageprofiler/test.yml b/tests/modules/damageprofiler/test.yml index 9ef964dc..4a560ce1 100644 --- a/tests/modules/damageprofiler/test.yml +++ b/tests/modules/damageprofiler/test.yml @@ -1,5 +1,5 @@ - name: damageprofiler - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: @@ -36,7 +36,7 @@ md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c - name: damageprofiler_reference - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_reference -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_reference -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: @@ -73,7 +73,7 @@ md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c - name: damageprofiler_specieslist - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_specieslist -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler 
-entry test_damageprofiler_specieslist -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: diff --git a/tests/modules/dastool/dastool/main.nf b/tests/modules/dastool/dastool/main.nf index 31c32ef4..f6f6becf 100644 --- a/tests/modules/dastool/dastool/main.nf +++ b/tests/modules/dastool/dastool/main.nf @@ -1,10 +1,10 @@ #!/usr/bin/env nextflow nextflow.enable.dsl = 2 -include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) -include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' addParams( options: [:] ) -include { DASTOOL_DASTOOL } from '../../../../modules/dastool/dastool/main.nf' addParams( options: [args: '--score_threshold 0 --debug'] ) +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' +include { DASTOOL_DASTOOL } from '../../../../modules/dastool/dastool/main.nf' workflow test_dastool_dastool { diff --git a/tests/modules/dastool/dastool/nextflow.config b/tests/modules/dastool/dastool/nextflow.config new file mode 100644 index 00000000..e306b4b4 --- /dev/null +++ b/tests/modules/dastool/dastool/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + + withName: DASTOOL_DASTOOL { + ext.args = '--score_threshold 0 --debug' + } + +} diff --git a/tests/modules/dastool/dastool/test.yml b/tests/modules/dastool/dastool/test.yml index eff02f96..e2161890 100644 --- a/tests/modules/dastool/dastool/test.yml +++ b/tests/modules/dastool/dastool/test.yml @@ -1,5 +1,5 @@ - name: dastool dastool test_dastool_dastool - command: nextflow run tests/modules/dastool/dastool -entry test_dastool_dastool -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dastool/dastool -entry test_dastool_dastool -c ./tests/config/nextflow.config -c ./tests/modules/dastool/dastool/nextflow.config tags: - dastool - dastool/dastool diff --git a/tests/modules/dastool/scaffolds2bin/main.nf b/tests/modules/dastool/scaffolds2bin/main.nf index 63ffe82a..a0cd6726 100644 --- a/tests/modules/dastool/scaffolds2bin/main.nf +++ b/tests/modules/dastool/scaffolds2bin/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) -include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' addParams( options: [:] ) +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' +include { 
DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' workflow test_dastool_scaffolds2bin { diff --git a/tests/modules/dastool/scaffolds2bin/nextflow.config b/tests/modules/dastool/scaffolds2bin/nextflow.config new file mode 100644 index 00000000..83754d8b --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + +} diff --git a/tests/modules/dastool/scaffolds2bin/test.yml b/tests/modules/dastool/scaffolds2bin/test.yml index c6e25bff..26f528c9 100644 --- a/tests/modules/dastool/scaffolds2bin/test.yml +++ b/tests/modules/dastool/scaffolds2bin/test.yml @@ -1,5 +1,5 @@ - name: dastool scaffolds2bin test_dastool_scaffolds2bin - command: nextflow run tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin -c ./tests/config/nextflow.config -c ./tests/modules/dastool/scaffolds2bin/nextflow.config tags: - dastool - dastool/scaffolds2bin diff --git a/tests/modules/dedup/main.nf b/tests/modules/dedup/main.nf index 37e8e5c2..4a397eaa 100644 --- a/tests/modules/dedup/main.nf +++ b/tests/modules/dedup/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEDUP } from '../../../modules/dedup/main.nf' addParams( options: [args: "-m"] ) +include { DEDUP } from '../../../modules/dedup/main.nf' workflow test_dedup { diff --git a/tests/modules/dedup/nextflow.config b/tests/modules/dedup/nextflow.config new file mode 100644 index 00000000..80a42463 --- /dev/null +++ b/tests/modules/dedup/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DEDUP { + ext.args = '-m' + } + +} diff --git a/tests/modules/dedup/test.yml b/tests/modules/dedup/test.yml index b35cfafd..077aac0d 100644 --- a/tests/modules/dedup/test.yml +++ b/tests/modules/dedup/test.yml @@ -1,5 +1,5 @@ - name: dedup test_dedup - command: nextflow run tests/modules/dedup -entry test_dedup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dedup -entry test_dedup -c ./tests/config/nextflow.config -c ./tests/modules/dedup/nextflow.config tags: - dedup files: diff --git a/tests/modules/deeptools/computematrix/main.nf b/tests/modules/deeptools/computematrix/main.nf index 116bc851..35e49f59 100644 --- a/tests/modules/deeptools/computematrix/main.nf +++ b/tests/modules/deeptools/computematrix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_COMPUTEMATRIX } from '../../../../modules/deeptools/computematrix/main.nf' addParams( options: ['args' : 'scale-regions -b 1000'] ) +include { DEEPTOOLS_COMPUTEMATRIX } from '../../../../modules/deeptools/computematrix/main.nf' workflow test_deeptools_computematrix { diff --git a/tests/modules/deeptools/computematrix/nextflow.config b/tests/modules/deeptools/computematrix/nextflow.config new file mode 100644 index 00000000..285b2165 --- /dev/null +++ b/tests/modules/deeptools/computematrix/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DEEPTOOLS_COMPUTEMATRIX { + ext.args = 'scale-regions -b 1000' + } + +} diff --git 
a/tests/modules/deeptools/computematrix/test.yml b/tests/modules/deeptools/computematrix/test.yml index fb2fa9e1..88657de3 100644 --- a/tests/modules/deeptools/computematrix/test.yml +++ b/tests/modules/deeptools/computematrix/test.yml @@ -1,5 +1,5 @@ - name: deeptools computematrix - command: nextflow run tests/modules/deeptools/computematrix -entry test_deeptools_computematrix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/computematrix -entry test_deeptools_computematrix -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/computematrix/nextflow.config tags: - deeptools - deeptools/computematrix diff --git a/tests/modules/deeptools/plotfingerprint/main.nf b/tests/modules/deeptools/plotfingerprint/main.nf index e84adc39..bcef970e 100644 --- a/tests/modules/deeptools/plotfingerprint/main.nf +++ b/tests/modules/deeptools/plotfingerprint/main.nf @@ -4,7 +4,7 @@ nextflow.enable.dsl = 2 params.fragment_size = 1000 -include { DEEPTOOLS_PLOTFINGERPRINT } from '../../../../modules/deeptools/plotfingerprint/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTFINGERPRINT } from '../../../../modules/deeptools/plotfingerprint/main.nf' workflow test_deeptools_plotfingerprint { diff --git a/tests/modules/deeptools/plotfingerprint/nextflow.config b/tests/modules/deeptools/plotfingerprint/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotfingerprint/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotfingerprint/test.yml b/tests/modules/deeptools/plotfingerprint/test.yml index b7803a6e..11d4ae7b 100644 --- a/tests/modules/deeptools/plotfingerprint/test.yml +++ b/tests/modules/deeptools/plotfingerprint/test.yml @@ -1,5 +1,5 @@ - name: deeptools plotfingerprint - command: nextflow run tests/modules/deeptools/plotfingerprint -entry test_deeptools_plotfingerprint -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotfingerprint -entry test_deeptools_plotfingerprint -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotfingerprint/nextflow.config tags: - deeptools - deeptools/plotfingerprint diff --git a/tests/modules/deeptools/plotheatmap/main.nf b/tests/modules/deeptools/plotheatmap/main.nf index 93e7d373..86005b2c 100644 --- a/tests/modules/deeptools/plotheatmap/main.nf +++ b/tests/modules/deeptools/plotheatmap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_PLOTHEATMAP } from '../../../../modules/deeptools/plotheatmap/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTHEATMAP } from '../../../../modules/deeptools/plotheatmap/main.nf' workflow test_deeptools_plotheatmap { diff --git a/tests/modules/deeptools/plotheatmap/nextflow.config b/tests/modules/deeptools/plotheatmap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotheatmap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotheatmap/test.yml b/tests/modules/deeptools/plotheatmap/test.yml index 641d5121..9273f840 100644 --- a/tests/modules/deeptools/plotheatmap/test.yml +++ b/tests/modules/deeptools/plotheatmap/test.yml @@ -1,5 +1,5 @@ - name: deeptools plotheatmap - command: nextflow run tests/modules/deeptools/plotheatmap 
-entry test_deeptools_plotheatmap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotheatmap -entry test_deeptools_plotheatmap -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotheatmap/nextflow.config tags: - deeptools - deeptools/plotheatmap diff --git a/tests/modules/deeptools/plotprofile/main.nf b/tests/modules/deeptools/plotprofile/main.nf index ac91f0c5..63ee47cd 100644 --- a/tests/modules/deeptools/plotprofile/main.nf +++ b/tests/modules/deeptools/plotprofile/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_PLOTPROFILE } from '../../../../modules/deeptools/plotprofile/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTPROFILE } from '../../../../modules/deeptools/plotprofile/main.nf' workflow test_deeptools_plotprofile { diff --git a/tests/modules/deeptools/plotprofile/nextflow.config b/tests/modules/deeptools/plotprofile/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotprofile/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotprofile/test.yml b/tests/modules/deeptools/plotprofile/test.yml index efe02ce5..4b6c5b9a 100644 --- a/tests/modules/deeptools/plotprofile/test.yml +++ b/tests/modules/deeptools/plotprofile/test.yml @@ -1,5 +1,5 @@ - name: deeptools plotprofile - command: nextflow run tests/modules/deeptools/plotprofile -entry test_deeptools_plotprofile -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotprofile -entry test_deeptools_plotprofile -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotprofile/nextflow.config tags: - deeptools - deeptools/plotprofile diff --git a/tests/modules/delly/call/main.nf b/tests/modules/delly/call/main.nf index f41dda95..f4583e05 100644 --- a/tests/modules/delly/call/main.nf +++ b/tests/modules/delly/call/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DELLY_CALL } from '../../../../modules/delly/call/main.nf' addParams( options: [:] ) +include { DELLY_CALL } from '../../../../modules/delly/call/main.nf' workflow test_delly_call { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/delly/call/nextflow.config b/tests/modules/delly/call/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/delly/call/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/delly/call/test.yml b/tests/modules/delly/call/test.yml index d8750892..a770d213 100644 --- a/tests/modules/delly/call/test.yml +++ b/tests/modules/delly/call/test.yml @@ -1,10 +1,9 @@ - name: delly call test_delly_call - command: nextflow run tests/modules/delly/call -entry test_delly_call -c tests/config/nextflow.config + command: nextflow run ./tests/modules/delly/call -entry test_delly_call -c ./tests/config/nextflow.config -c ./tests/modules/delly/call/nextflow.config tags: - delly - delly/call files: - path: output/delly/test.bcf - md5sum: 360c1bf6867f33bd2a868ddfb4d957fc - path: output/delly/test.bcf.csi md5sum: 19e0cdf06c415f4942f6d4dbd5fb7271 diff --git a/tests/modules/diamond/blastp/main.nf b/tests/modules/diamond/blastp/main.nf index ab131a86..87d05bf9 100644 --- a/tests/modules/diamond/blastp/main.nf +++ b/tests/modules/diamond/blastp/main.nf @@ -2,8 +2,8 @@ 
nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) -include { DIAMOND_BLASTP } from '../../../../modules/diamond/blastp/main.nf' addParams( options: [ suffix: '.diamond_blastp' ] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' +include { DIAMOND_BLASTP } from '../../../../modules/diamond/blastp/main.nf' workflow test_diamond_blastp { diff --git a/tests/modules/diamond/blastp/nextflow.config b/tests/modules/diamond/blastp/nextflow.config new file mode 100644 index 00000000..d1222d49 --- /dev/null +++ b/tests/modules/diamond/blastp/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DIAMOND_BLASTP { + ext.suffix = '.diamond_blastp' + } + +} diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml index ae62ea51..673563cb 100644 --- a/tests/modules/diamond/blastp/test.yml +++ b/tests/modules/diamond/blastp/test.yml @@ -1,5 +1,5 @@ - name: diamond blastp - command: nextflow run ./tests/modules/diamond/blastp -entry test_diamond_blastp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/blastp -entry test_diamond_blastp -c ./tests/config/nextflow.config -c ./tests/modules/diamond/blastp/nextflow.config tags: - diamond - diamond/blastp diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index c0e437d7..77eb08ea 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) -include { DIAMOND_BLASTX } from '../../../../modules/diamond/blastx/main.nf' addParams( options: [ suffix: '.diamond_blastx' ] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' +include { DIAMOND_BLASTX } from '../../../../modules/diamond/blastx/main.nf' workflow test_diamond_blastx { diff --git a/tests/modules/diamond/blastx/nextflow.config b/tests/modules/diamond/blastx/nextflow.config new file mode 100644 index 00000000..83169455 --- /dev/null +++ b/tests/modules/diamond/blastx/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DIAMOND_BLASTX { + ext.suffix = '.diamond_blastx' + } + +} diff --git a/tests/modules/diamond/blastx/test.yml b/tests/modules/diamond/blastx/test.yml index 91a6eb4f..ee94802f 100644 --- a/tests/modules/diamond/blastx/test.yml +++ b/tests/modules/diamond/blastx/test.yml @@ -1,5 +1,5 @@ - name: diamond blastx - command: nextflow run ./tests/modules/diamond/blastx -entry test_diamond_blastx -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/blastx -entry test_diamond_blastx -c ./tests/config/nextflow.config -c ./tests/modules/diamond/blastx/nextflow.config tags: - diamond - diamond/blastx diff --git a/tests/modules/diamond/makedb/main.nf b/tests/modules/diamond/makedb/main.nf index bcd7691e..70982ae9 100644 --- a/tests/modules/diamond/makedb/main.nf +++ b/tests/modules/diamond/makedb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' workflow test_diamond_makedb { diff --git 
a/tests/modules/diamond/makedb/nextflow.config b/tests/modules/diamond/makedb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/diamond/makedb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/diamond/makedb/test.yml b/tests/modules/diamond/makedb/test.yml index 335b571f..c8f2d79e 100644 --- a/tests/modules/diamond/makedb/test.yml +++ b/tests/modules/diamond/makedb/test.yml @@ -1,5 +1,5 @@ - name: diamond makedb test_diamond_makedb - command: nextflow run ./tests/modules/diamond/makedb -entry test_diamond_makedb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/makedb -entry test_diamond_makedb -c ./tests/config/nextflow.config -c ./tests/modules/diamond/makedb/nextflow.config tags: - diamond - diamond/makedb diff --git a/tests/modules/dragonflye/main.nf b/tests/modules/dragonflye/main.nf index 4d3ac6e5..3d59bb21 100644 --- a/tests/modules/dragonflye/main.nf +++ b/tests/modules/dragonflye/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { DRAGONFLYE } from '../../../modules/dragonflye/main.nf' addParams( options: [args: '--assembler miniasm --gsize 5000000'] ) -include { DRAGONFLYE as DRAGONFLYE_RAVEN } from '../../../modules/dragonflye/main.nf' addParams( options: [args: '--assembler raven --gsize 5000000'] ) +include { DRAGONFLYE } from '../../../modules/dragonflye/main.nf' +include { DRAGONFLYE as DRAGONFLYE_RAVEN } from '../../../modules/dragonflye/main.nf' workflow test_dragonflye { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/dragonflye/nextflow.config b/tests/modules/dragonflye/nextflow.config new file mode 100644 index 00000000..fea43da4 --- /dev/null +++ b/tests/modules/dragonflye/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DRAGONFLYE { + ext.args = '--assembler miniasm --gsize 5000000' + } + + withName: DRAGONFLYE_RAVEN { + ext.args = '--assembler raven --gsize 5000000' + } + +} diff --git a/tests/modules/dragonflye/test.yml b/tests/modules/dragonflye/test.yml index fe6283c0..ef9121ba 100644 --- a/tests/modules/dragonflye/test.yml +++ b/tests/modules/dragonflye/test.yml @@ -1,5 +1,5 @@ - name: dragonflye with miniasm - command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye -c ./tests/config/nextflow.config -c ./tests/modules/dragonflye/nextflow.config tags: - dragonflye files: @@ -12,7 +12,7 @@ - path: output/dragonflye/dragonflye.log - name: dragonflye with raven - command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye_raven -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye_raven -c ./tests/config/nextflow.config -c ./tests/modules/dragonflye/nextflow.config tags: - dragonflye files: diff --git a/tests/modules/dshbio/exportsegments/main.nf b/tests/modules/dshbio/exportsegments/main.nf index 6eef1046..c213dc54 100644 --- a/tests/modules/dshbio/exportsegments/main.nf +++ b/tests/modules/dshbio/exportsegments/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_EXPORTSEGMENTS } from '../../../../modules/dshbio/exportsegments/main.nf' addParams( options: [:] ) +include { DSHBIO_EXPORTSEGMENTS } from 
'../../../../modules/dshbio/exportsegments/main.nf' workflow test_dshbio_exportsegments { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/exportsegments/nextflow.config b/tests/modules/dshbio/exportsegments/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/dshbio/exportsegments/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/dshbio/exportsegments/test.yml b/tests/modules/dshbio/exportsegments/test.yml index 453e1cba..c811df03 100644 --- a/tests/modules/dshbio/exportsegments/test.yml +++ b/tests/modules/dshbio/exportsegments/test.yml @@ -1,5 +1,5 @@ - name: dshbio exportsegments - command: nextflow run ./tests/modules/dshbio/exportsegments -entry test_dshbio_exportsegments -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/exportsegments -entry test_dshbio_exportsegments -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/exportsegments/nextflow.config tags: - dshbio - dshbio/exportsegments diff --git a/tests/modules/dshbio/filterbed/main.nf b/tests/modules/dshbio/filterbed/main.nf index 722c88d2..454a03be 100644 --- a/tests/modules/dshbio/filterbed/main.nf +++ b/tests/modules/dshbio/filterbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_FILTERBED } from '../../../../modules/dshbio/filterbed/main.nf' addParams( options: [suffix: '.filtered', args: '--range chr1:0-1000'] ) +include { DSHBIO_FILTERBED } from '../../../../modules/dshbio/filterbed/main.nf' workflow test_dshbio_filterbed { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/filterbed/nextflow.config b/tests/modules/dshbio/filterbed/nextflow.config new file mode 100644 index 00000000..2f1e5ab9 --- /dev/null +++ b/tests/modules/dshbio/filterbed/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_FILTERBED { + ext.args = '--range chr1:0-1000' + ext.suffix = '.filtered' + } +} diff --git a/tests/modules/dshbio/filterbed/test.yml b/tests/modules/dshbio/filterbed/test.yml index ad1cde66..278fd5a3 100644 --- a/tests/modules/dshbio/filterbed/test.yml +++ b/tests/modules/dshbio/filterbed/test.yml @@ -1,5 +1,5 @@ - name: dshbio filterbed - command: nextflow run ./tests/modules/dshbio/filterbed -entry test_dshbio_filterbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/filterbed -entry test_dshbio_filterbed -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/filterbed/nextflow.config tags: - dshbio - dshbio/filterbed diff --git a/tests/modules/dshbio/filtergff3/main.nf b/tests/modules/dshbio/filtergff3/main.nf index 3156d091..7c803781 100644 --- a/tests/modules/dshbio/filtergff3/main.nf +++ b/tests/modules/dshbio/filtergff3/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_FILTERGFF3 } from '../../../../modules/dshbio/filtergff3/main.nf' addParams( options: [suffix: '.filtered', args: '--range MT192765.1:0-1000'] ) +include { DSHBIO_FILTERGFF3 } from '../../../../modules/dshbio/filtergff3/main.nf' workflow test_dshbio_filtergff3 { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/filtergff3/nextflow.config b/tests/modules/dshbio/filtergff3/nextflow.config new file mode 100644 index 00000000..c4b75eaf --- /dev/null +++ b/tests/modules/dshbio/filtergff3/nextflow.config @@ -0,0 
+1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_FILTERGFF3 { + ext.args = '--range MT192765.1:0-1000' + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/dshbio/filtergff3/test.yml b/tests/modules/dshbio/filtergff3/test.yml index 95d1b446..43238333 100644 --- a/tests/modules/dshbio/filtergff3/test.yml +++ b/tests/modules/dshbio/filtergff3/test.yml @@ -1,5 +1,5 @@ - name: dshbio filtergff3 - command: nextflow run ./tests/modules/dshbio/filtergff3 -entry test_dshbio_filtergff3 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/filtergff3 -entry test_dshbio_filtergff3 -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/filtergff3/nextflow.config tags: - dshbio - dshbio/filtergff3 diff --git a/tests/modules/dshbio/splitbed/main.nf b/tests/modules/dshbio/splitbed/main.nf index d7f3d004..517baad0 100644 --- a/tests/modules/dshbio/splitbed/main.nf +++ b/tests/modules/dshbio/splitbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_SPLITBED } from '../../../../modules/dshbio/splitbed/main.nf' addParams( options: [suffix: '.', args: '--records 2'] ) +include { DSHBIO_SPLITBED } from '../../../../modules/dshbio/splitbed/main.nf' workflow test_dshbio_splitbed { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/splitbed/nextflow.config b/tests/modules/dshbio/splitbed/nextflow.config new file mode 100644 index 00000000..4369c509 --- /dev/null +++ b/tests/modules/dshbio/splitbed/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_SPLITBED { + ext.suffix = '.' + ext.args = '--records 2' + } + +} diff --git a/tests/modules/dshbio/splitbed/test.yml b/tests/modules/dshbio/splitbed/test.yml index 04f5b150..ab14648e 100644 --- a/tests/modules/dshbio/splitbed/test.yml +++ b/tests/modules/dshbio/splitbed/test.yml @@ -1,5 +1,5 @@ - name: dshbio splitbed - command: nextflow run ./tests/modules/dshbio/splitbed -entry test_dshbio_splitbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/splitbed -entry test_dshbio_splitbed -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/splitbed/nextflow.config tags: - dshbio - dshbio/splitbed diff --git a/tests/modules/dshbio/splitgff3/main.nf b/tests/modules/dshbio/splitgff3/main.nf index dd58201a..03aa5394 100644 --- a/tests/modules/dshbio/splitgff3/main.nf +++ b/tests/modules/dshbio/splitgff3/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_SPLITGFF3 } from '../../../../modules/dshbio/splitgff3/main.nf' addParams( options: [suffix: '.', args: '--records 15'] ) +include { DSHBIO_SPLITGFF3 } from '../../../../modules/dshbio/splitgff3/main.nf' workflow test_dshbio_splitgff3 { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/splitgff3/nextflow.config b/tests/modules/dshbio/splitgff3/nextflow.config new file mode 100644 index 00000000..e31f8e13 --- /dev/null +++ b/tests/modules/dshbio/splitgff3/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_SPLITGFF3 { + ext.suffix = '.' 
+ ext.args = '--records 15' + } + +} diff --git a/tests/modules/dshbio/splitgff3/test.yml b/tests/modules/dshbio/splitgff3/test.yml index fe5b1bed..6087ce11 100644 --- a/tests/modules/dshbio/splitgff3/test.yml +++ b/tests/modules/dshbio/splitgff3/test.yml @@ -1,5 +1,5 @@ - name: dshbio splitgff3 - command: nextflow run ./tests/modules/dshbio/splitgff3 -entry test_dshbio_splitgff3 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/splitgff3 -entry test_dshbio_splitgff3 -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/splitgff3/nextflow.config tags: - dshbio - dshbio/splitgff3 diff --git a/tests/modules/ectyper/main.nf b/tests/modules/ectyper/main.nf index 123df68d..dd359fa2 100644 --- a/tests/modules/ectyper/main.nf +++ b/tests/modules/ectyper/main.nf @@ -2,12 +2,14 @@ nextflow.enable.dsl = 2 -include { ECTYPER } from '../../../modules/ectyper/main.nf' addParams( options: [:] ) +include { ECTYPER } from '../../../modules/ectyper/main.nf' workflow test_ectyper { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] ECTYPER ( input ) } diff --git a/tests/modules/ectyper/nextflow.config b/tests/modules/ectyper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ectyper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ectyper/test.yml b/tests/modules/ectyper/test.yml index c6f4c668..4f909bd9 100644 --- a/tests/modules/ectyper/test.yml +++ b/tests/modules/ectyper/test.yml @@ -1,5 +1,5 @@ - name: ectyper test_ectyper - command: nextflow run tests/modules/ectyper -entry test_ectyper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ectyper -entry test_ectyper -c ./tests/config/nextflow.config -c ./tests/modules/ectyper/nextflow.config tags: - ectyper files: diff --git a/tests/modules/emmtyper/main.nf b/tests/modules/emmtyper/main.nf index 9f2181a8..ee96fc32 100644 --- a/tests/modules/emmtyper/main.nf +++ b/tests/modules/emmtyper/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { EMMTYPER } from '../../../modules/emmtyper/main.nf' addParams( options: [:] ) +include { EMMTYPER } from '../../../modules/emmtyper/main.nf' workflow test_emmtyper { diff --git a/tests/modules/emmtyper/nextflow.config b/tests/modules/emmtyper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/emmtyper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/emmtyper/test.yml b/tests/modules/emmtyper/test.yml index da59e0f1..81854eb6 100644 --- a/tests/modules/emmtyper/test.yml +++ b/tests/modules/emmtyper/test.yml @@ -1,5 +1,5 @@ - name: emmtyper test_emmtyper - command: nextflow run tests/modules/emmtyper -entry test_emmtyper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/emmtyper -entry test_emmtyper -c ./tests/config/nextflow.config -c ./tests/modules/emmtyper/nextflow.config tags: - emmtyper files: diff --git a/tests/modules/ensemblvep/main.nf b/tests/modules/ensemblvep/main.nf index 3cbb26f1..223847c7 100644 --- a/tests/modules/ensemblvep/main.nf +++ 
b/tests/modules/ensemblvep/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { ENSEMBLVEP } from '../../../modules/ensemblvep/main.nf' addParams( vep_tag: '104.3.WBcel235', use_cache: false ) +include { ENSEMBLVEP } from '../../../modules/ensemblvep/main.nf' workflow test_ensemblvep { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] - ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + ENSEMBLVEP ( input, "WBcel235", "caenorhabditis_elegans", "104", [] ) } diff --git a/tests/modules/ensemblvep/nextflow.config b/tests/modules/ensemblvep/nextflow.config new file mode 100644 index 00000000..bcca2d06 --- /dev/null +++ b/tests/modules/ensemblvep/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ENSEMBLVEP { + ext.vep_tag = '104.3.WBcel235' + ext.use_cache = false + } + +} diff --git a/tests/modules/ensemblvep/test.yml b/tests/modules/ensemblvep/test.yml index a6e33cae..42384d6e 100644 --- a/tests/modules/ensemblvep/test.yml +++ b/tests/modules/ensemblvep/test.yml @@ -1,5 +1,5 @@ - name: ensemblvep test_ensemblvep - command: nextflow run tests/modules/ensemblvep -entry test_ensemblvep -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ensemblvep -entry test_ensemblvep -c ./tests/config/nextflow.config -c ./tests/modules/ensemblvep/nextflow.config tags: - ensemblvep files: diff --git a/tests/modules/expansionhunter/main.nf b/tests/modules/expansionhunter/main.nf index a7acbff4..91faeeb8 100644 --- a/tests/modules/expansionhunter/main.nf +++ b/tests/modules/expansionhunter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' addParams( options: [:] ) +include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' workflow test_expansionhunter { diff --git a/tests/modules/expansionhunter/nextflow.config b/tests/modules/expansionhunter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/expansionhunter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/expansionhunter/test.yml b/tests/modules/expansionhunter/test.yml index 78d5c002..19403588 100644 --- a/tests/modules/expansionhunter/test.yml +++ b/tests/modules/expansionhunter/test.yml @@ -1,5 +1,5 @@ - name: expansionhunter test_expansionhunter - command: nextflow run tests/modules/expansionhunter -entry test_expansionhunter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/expansionhunter -entry test_expansionhunter -c ./tests/config/nextflow.config -c ./tests/modules/expansionhunter/nextflow.config tags: - expansionhunter files: diff --git a/tests/modules/fargene/main.nf b/tests/modules/fargene/main.nf index f89392ff..6600015b 100644 --- a/tests/modules/fargene/main.nf +++ b/tests/modules/fargene/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FARGENE } from '../../../modules/fargene/main.nf' addParams( options: [:] ) +include { FARGENE } from '../../../modules/fargene/main.nf' workflow test_fargene { diff --git a/tests/modules/fargene/nextflow.config b/tests/modules/fargene/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ 
b/tests/modules/fargene/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fargene/test.yml b/tests/modules/fargene/test.yml index 3db6699c..622e44b0 100644 --- a/tests/modules/fargene/test.yml +++ b/tests/modules/fargene/test.yml @@ -1,5 +1,5 @@ - name: fargene - command: nextflow run tests/modules/fargene -entry test_fargene -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fargene -entry test_fargene -c ./tests/config/nextflow.config -c ./tests/modules/fargene/nextflow.config tags: - fargene files: diff --git a/tests/modules/fastani/main.nf b/tests/modules/fastani/main.nf index a5548e20..0395f6a9 100644 --- a/tests/modules/fastani/main.nf +++ b/tests/modules/fastani/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTANI } from '../../../modules/fastani/main.nf' addParams( options: [:] ) +include { FASTANI } from '../../../modules/fastani/main.nf' workflow test_fastani { diff --git a/tests/modules/fastani/nextflow.config b/tests/modules/fastani/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastani/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastani/test.yml b/tests/modules/fastani/test.yml index cd411d06..f3748d25 100644 --- a/tests/modules/fastani/test.yml +++ b/tests/modules/fastani/test.yml @@ -1,5 +1,5 @@ - name: fastani - command: nextflow run ./tests/modules/fastani -entry test_fastani -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastani -entry test_fastani -c ./tests/config/nextflow.config -c ./tests/modules/fastani/nextflow.config tags: - fastani files: diff --git a/tests/modules/fastp/main.nf b/tests/modules/fastp/main.nf index c8e5112f..d1540974 100644 --- a/tests/modules/fastp/main.nf +++ b/tests/modules/fastp/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTP } from '../../../modules/fastp/main.nf' addParams( options: [:] ) +include { FASTP } from '../../../modules/fastp/main.nf' // // Test with single-end data diff --git a/tests/modules/fastp/nextflow.config b/tests/modules/fastp/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastp/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastp/test.yml b/tests/modules/fastp/test.yml index 365ce025..cd7ddeed 100644 --- a/tests/modules/fastp/test.yml +++ b/tests/modules/fastp/test.yml @@ -1,5 +1,5 @@ - name: fastp test_fastp_single_end - command: nextflow run tests/modules/fastp -entry test_fastp_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_single_end -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -17,7 +17,7 @@ md5sum: e0d856ebb3da9e4462c3ce9683efe01d - name: fastp test_fastp_paired_end - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -38,7 +38,7 @@ md5sum: 9eff7203596580cc5e42aceab4a469df - name: fastp test_fastp_single_end_trim_fail - 
command: nextflow run tests/modules/fastp -entry test_fastp_single_end_trim_fail -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_single_end_trim_fail -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -58,7 +58,7 @@ md5sum: de315d397c994d8e66bafc7a8dc11070 - name: fastp test_fastp_paired_end_trim_fail - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end_trim_fail -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end_trim_fail -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -83,7 +83,7 @@ md5sum: f52309b35a7c15cbd56a9c3906ef98a5 - name: fastp test_fastp_paired_end_merged - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end_merged -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end_merged -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: diff --git a/tests/modules/fastqc/main.nf b/tests/modules/fastqc/main.nf index d95befec..f7db9b7c 100644 --- a/tests/modules/fastqc/main.nf +++ b/tests/modules/fastqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTQC } from '../../../modules/fastqc/main.nf' addParams( options: [:] ) +include { FASTQC } from '../../../modules/fastqc/main.nf' // // Test with single-end data diff --git a/tests/modules/fastqc/nextflow.config b/tests/modules/fastqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastqc/test.yml b/tests/modules/fastqc/test.yml index 794e63fe..fa830cbc 100644 --- a/tests/modules/fastqc/test.yml +++ b/tests/modules/fastqc/test.yml @@ -1,5 +1,5 @@ - name: fastqc single-end - command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_single_end -c ./tests/config/nextflow.config -c ./tests/modules/fastqc/nextflow.config tags: - fastqc files: @@ -7,7 +7,7 @@ - path: ./output/fastqc/test_fastqc.zip - name: fastqc paired-end - command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/fastqc/nextflow.config tags: - fastqc files: diff --git a/tests/modules/fastqscan/main.nf b/tests/modules/fastqscan/main.nf index 5fd824f6..b9a321fe 100644 --- a/tests/modules/fastqscan/main.nf +++ b/tests/modules/fastqscan/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTQSCAN } from '../../../modules/fastqscan/main.nf' addParams( options: [ args: "-g 30000"] ) +include { FASTQSCAN } from '../../../modules/fastqscan/main.nf' workflow test_fastqscan { diff --git a/tests/modules/fastqscan/nextflow.config b/tests/modules/fastqscan/nextflow.config new file mode 100644 index 00000000..f688ecb6 --- /dev/null +++ b/tests/modules/fastqscan/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FASTQSCAN { +
ext.args = '-g 30000' + } + +} diff --git a/tests/modules/fastqscan/test.yml b/tests/modules/fastqscan/test.yml index 80bcbc47..d538804c 100644 --- a/tests/modules/fastqscan/test.yml +++ b/tests/modules/fastqscan/test.yml @@ -1,5 +1,5 @@ - name: fastqscan test_fastqscan - command: nextflow run tests/modules/fastqscan -entry test_fastqscan -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastqscan -entry test_fastqscan -c ./tests/config/nextflow.config -c ./tests/modules/fastqscan/nextflow.config tags: - fastqscan files: diff --git a/tests/modules/fasttree/main.nf b/tests/modules/fasttree/main.nf index 109aaa77..e33228a9 100644 --- a/tests/modules/fasttree/main.nf +++ b/tests/modules/fasttree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTTREE } from '../../../modules/fasttree/main.nf' addParams( options: [:] ) +include { FASTTREE } from '../../../modules/fasttree/main.nf' workflow test_fasttree { diff --git a/tests/modules/fasttree/nextflow.config b/tests/modules/fasttree/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fasttree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fasttree/test.yml b/tests/modules/fasttree/test.yml index b30590c7..7e344cff 100644 --- a/tests/modules/fasttree/test.yml +++ b/tests/modules/fasttree/test.yml @@ -1,5 +1,5 @@ - name: fasttree - command: nextflow run ./tests/modules/fasttree -entry test_fasttree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fasttree -entry test_fasttree -c ./tests/config/nextflow.config -c ./tests/modules/fasttree/nextflow.config tags: - fasttree files: diff --git a/tests/modules/fgbio/callmolecularconsensusreads/main.nf b/tests/modules/fgbio/callmolecularconsensusreads/main.nf index 8ce34eca..e31fdf39 100644 --- a/tests/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/tests/modules/fgbio/callmolecularconsensusreads/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' addParams( options: [args: '-s TemplateCoordinate', suffix: '_out'] ) -include { FGBIO_CALLMOLECULARCONSENSUSREADS } from '../../../../modules/fgbio/callmolecularconsensusreads/main.nf' addParams( options: [args: '-M 1', suffix: '_molreads'] ) +include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' +include { FGBIO_CALLMOLECULARCONSENSUSREADS } from '../../../../modules/fgbio/callmolecularconsensusreads/main.nf' workflow test_fgbio_callmolecularconsensusreads { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config new file mode 100644 index 00000000..0a266da9 --- /dev/null +++ b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config @@ -0,0 +1,15 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FGBIO_SORTBAM { + ext.args = '-s TemplateCoordinate' + ext.suffix = '_out' + } + + withName: FGBIO_CALLMOLECULARCONSENSUSREADS { + ext.args = '-M 1' + ext.suffix = '_molreads' + } + +} diff --git a/tests/modules/fgbio/callmolecularconsensusreads/test.yml b/tests/modules/fgbio/callmolecularconsensusreads/test.yml index ac53957c..5e26cd01 100644 --- a/tests/modules/fgbio/callmolecularconsensusreads/test.yml +++ 
b/tests/modules/fgbio/callmolecularconsensusreads/test.yml @@ -1,5 +1,5 @@ - name: fgbio callmolecularconsensusreads - command: nextflow run tests/modules/fgbio/callmolecularconsensusreads -entry test_fgbio_callmolecularconsensusreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/callmolecularconsensusreads -entry test_fgbio_callmolecularconsensusreads -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/callmolecularconsensusreads/nextflow.config tags: - fgbio - fgbio/callmolecularconsensusreads diff --git a/tests/modules/fgbio/fastqtobam/main.nf b/tests/modules/fgbio/fastqtobam/main.nf index ce2f7efc..f01a17fa 100644 --- a/tests/modules/fgbio/fastqtobam/main.nf +++ b/tests/modules/fgbio/fastqtobam/main.nf @@ -1,16 +1,19 @@ #!/usr/bin/env nextflow nextflow.enable.dsl = 2 -params.read_structure = "+T 12M11S+T" -include { FGBIO_FASTQTOBAM } from '../../../../modules/fgbio/fastqtobam/main.nf' addParams( options: [:] ) +include { FGBIO_FASTQTOBAM } from '../../../../modules/fgbio/fastqtobam/main.nf' workflow test_fgbio_fastqtobam { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + read_structure = "+T 12M11S+T" - FGBIO_FASTQTOBAM ( input, "${params.read_structure}" ) + FGBIO_FASTQTOBAM ( input, read_structure ) } diff --git a/tests/modules/fgbio/fastqtobam/nextflow.config b/tests/modules/fgbio/fastqtobam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/fastqtobam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/fastqtobam/test.yml b/tests/modules/fgbio/fastqtobam/test.yml index 6f2554e9..ab73f425 100644 --- a/tests/modules/fgbio/fastqtobam/test.yml +++ b/tests/modules/fgbio/fastqtobam/test.yml @@ -1,10 +1,8 @@ - name: fgbio fastqtobam test_fgbio_fastqtobam - command: nextflow run tests/modules/fgbio/fastqtobam -entry test_fgbio_fastqtobam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/fastqtobam -entry test_fgbio_fastqtobam -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/fastqtobam/nextflow.config tags: - fgbio/fastqtobam - fgbio files: - path: output/fgbio/test_umi_converted.bam md5sum: 9510735554e5eff29244077a72075fb6 - - path: output/fgbio/versions.yml - md5sum: 524815093b96759060d0d800fc6a3f25 diff --git a/tests/modules/fgbio/groupreadsbyumi/main.nf b/tests/modules/fgbio/groupreadsbyumi/main.nf index 31f55724..1d5fb474 100644 --- a/tests/modules/fgbio/groupreadsbyumi/main.nf +++ b/tests/modules/fgbio/groupreadsbyumi/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { FGBIO_GROUPREADSBYUMI } from '../../../../modules/fgbio/groupreadsbyumi/main.nf' addParams( options: [:] ) +include { FGBIO_GROUPREADSBYUMI } from '../../../../modules/fgbio/groupreadsbyumi/main.nf' workflow test_fgbio_groupreadsbyumi { - input = [ [ id:'test', single_end:false ], // meta map - 
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) ] - + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) + ] strategy = "Adjacency" FGBIO_GROUPREADSBYUMI ( input, strategy ) diff --git a/tests/modules/fgbio/groupreadsbyumi/nextflow.config b/tests/modules/fgbio/groupreadsbyumi/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/groupreadsbyumi/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/groupreadsbyumi/test.yml b/tests/modules/fgbio/groupreadsbyumi/test.yml index ce70f129..c1cfd4f0 100644 --- a/tests/modules/fgbio/groupreadsbyumi/test.yml +++ b/tests/modules/fgbio/groupreadsbyumi/test.yml @@ -1,5 +1,5 @@ - name: fgbio groupreadsbyumi test_fgbio_groupreadsbyumi - command: nextflow run tests/modules/fgbio/groupreadsbyumi -entry test_fgbio_groupreadsbyumi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/groupreadsbyumi -entry test_fgbio_groupreadsbyumi -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/groupreadsbyumi/nextflow.config tags: - fgbio - fgbio/groupreadsbyumi diff --git a/tests/modules/fgbio/sortbam/main.nf b/tests/modules/fgbio/sortbam/main.nf index 65bea1d7..ada99d0f 100644 --- a/tests/modules/fgbio/sortbam/main.nf +++ b/tests/modules/fgbio/sortbam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' addParams( options: [:] ) +include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' workflow test_fgbio_sortbam { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/fgbio/sortbam/nextflow.config b/tests/modules/fgbio/sortbam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/sortbam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/sortbam/test.yml b/tests/modules/fgbio/sortbam/test.yml index 68183cd2..6789aed8 100644 --- a/tests/modules/fgbio/sortbam/test.yml +++ b/tests/modules/fgbio/sortbam/test.yml @@ -1,5 +1,5 @@ - name: fgbio sortbam - command: nextflow run tests/modules/fgbio/sortbam -entry test_fgbio_sortbam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/sortbam -entry test_fgbio_sortbam -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/sortbam/nextflow.config tags: - fgbio - fgbio/sortbam diff --git a/tests/modules/filtlong/main.nf b/tests/modules/filtlong/main.nf index cd037623..df7892aa 100644 --- a/tests/modules/filtlong/main.nf +++ b/tests/modules/filtlong/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FILTLONG } from '../../../modules/filtlong/main.nf' addParams( options: [:] ) +include { FILTLONG } from '../../../modules/filtlong/main.nf' workflow test_filtlong { diff --git a/tests/modules/filtlong/nextflow.config b/tests/modules/filtlong/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/filtlong/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/filtlong/test.yml b/tests/modules/filtlong/test.yml index 30779d45..dc5fa5a9 100644 --- a/tests/modules/filtlong/test.yml +++ b/tests/modules/filtlong/test.yml @@ -1,5 +1,5 @@ - name: filtlong test_filtlong - command: nextflow run tests/modules/filtlong -entry test_filtlong -c tests/config/nextflow.config + command: nextflow run ./tests/modules/filtlong -entry test_filtlong -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: @@ -7,7 +7,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: filtlong test_filtlong_illumina_se - command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_se -c tests/config/nextflow.config + command: nextflow run ./tests/modules/filtlong -entry test_filtlong_illumina_se -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: @@ -15,7 +15,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: filtlong test_filtlong_illumina_pe - command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_pe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/filtlong -entry test_filtlong_illumina_pe -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: diff --git a/tests/modules/flash/main.nf b/tests/modules/flash/main.nf index 2128650d..4afcb8fc 100644 --- a/tests/modules/flash/main.nf +++ b/tests/modules/flash/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FLASH } from '../../../modules/flash/main.nf' addParams( options: [args:'-m 20 -M 100'] ) +include { FLASH } from '../../../modules/flash/main.nf' workflow test_flash { input = [ diff --git a/tests/modules/flash/nextflow.config b/tests/modules/flash/nextflow.config new file mode 100644 index 00000000..2845f9d9 --- /dev/null +++ b/tests/modules/flash/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FLASH { + ext.args = '-m 20 -M 100' + } + +} diff --git a/tests/modules/flash/test.yml b/tests/modules/flash/test.yml index 31cdaeff..e5ed49ca 100644 --- a/tests/modules/flash/test.yml +++ b/tests/modules/flash/test.yml @@ -1,5 +1,5 @@ - name: flash test_flash - command: nextflow run tests/modules/flash -entry test_flash -c tests/config/nextflow.config + command: nextflow run ./tests/modules/flash -entry test_flash -c ./tests/config/nextflow.config -c ./tests/modules/flash/nextflow.config tags: - flash files: diff --git a/tests/modules/freebayes/main.nf b/tests/modules/freebayes/main.nf index c6f5641f..f8ae0ecb 100644 --- a/tests/modules/freebayes/main.nf +++ b/tests/modules/freebayes/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FREEBAYES } from '../../../modules/freebayes/main.nf' addParams( options: [:] ) +include { FREEBAYES } from '../../../modules/freebayes/main.nf' workflow test_freebayes { diff --git a/tests/modules/freebayes/nextflow.config b/tests/modules/freebayes/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/freebayes/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/freebayes/test.yml b/tests/modules/freebayes/test.yml index 22fd0e88..c9aa78da 100644 --- a/tests/modules/freebayes/test.yml +++ b/tests/modules/freebayes/test.yml @@ -1,33 +1,33 @@ - name: freebayes test_freebayes - command: nextflow run 
tests/modules/freebayes -entry test_freebayes -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_bed - command: nextflow run tests/modules/freebayes -entry test_freebayes_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_bed -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_cram - command: nextflow run tests/modules/freebayes -entry test_freebayes_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_cram -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_somatic - command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_somatic -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_somatic_cram_intervals - command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic_cram_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_somatic_cram_intervals -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: diff --git a/tests/modules/gatk4/applybqsr/main.nf b/tests/modules/gatk4/applybqsr/main.nf index 80b51015..da85b11b 100644 --- a/tests/modules/gatk4/applybqsr/main.nf +++ b/tests/modules/gatk4/applybqsr/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_APPLYBQSR } from '../../../../modules/gatk4/applybqsr/main.nf' addParams( options: [:] ) +include { GATK4_APPLYBQSR } from '../../../../modules/gatk4/applybqsr/main.nf' workflow test_gatk4_applybqsr { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/applybqsr/nextflow.config b/tests/modules/gatk4/applybqsr/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/applybqsr/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/applybqsr/test.yml b/tests/modules/gatk4/applybqsr/test.yml index 02448b02..d0b07d94 100644 --- a/tests/modules/gatk4/applybqsr/test.yml +++ b/tests/modules/gatk4/applybqsr/test.yml @@ -1,5 +1,5 @@ - name: gatk4 applybqsr test_gatk4_applybqsr - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 @@ -8,7 +8,7 @@ md5sum: af56f5dd81b95070079d54670507f530 - name: gatk4 applybqsr test_gatk4_applybqsr_intervals - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c 
./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 @@ -17,7 +17,7 @@ md5sum: 0cbfa4be143e988d56ce741b5077510e - name: gatk4 applybqsr test_gatk4_applybqsr_cram - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 diff --git a/tests/modules/gatk4/baserecalibrator/main.nf b/tests/modules/gatk4/baserecalibrator/main.nf index a50c09e3..2675d04b 100644 --- a/tests/modules/gatk4/baserecalibrator/main.nf +++ b/tests/modules/gatk4/baserecalibrator/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_BASERECALIBRATOR } from '../../../../modules/gatk4/baserecalibrator/main.nf' addParams( options: [:] ) +include { GATK4_BASERECALIBRATOR } from '../../../../modules/gatk4/baserecalibrator/main.nf' workflow test_gatk4_baserecalibrator { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/baserecalibrator/nextflow.config b/tests/modules/gatk4/baserecalibrator/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/baserecalibrator/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/baserecalibrator/test.yml b/tests/modules/gatk4/baserecalibrator/test.yml index a15c9ee3..163fac08 100644 --- a/tests/modules/gatk4/baserecalibrator/test.yml +++ b/tests/modules/gatk4/baserecalibrator/test.yml @@ -1,5 +1,5 @@ - name: gatk4 baserecalibrator test_gatk4_baserecalibrator - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -8,7 +8,7 @@ md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_cram - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -17,7 +17,7 @@ md5sum: 35d89a3811aa31711fc9815b6b80e6ec - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_intervals - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -26,7 +26,7 @@ md5sum: 9ecb5f00a2229291705addc09c0ec231 - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_multiple_sites - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_multiple_sites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_multiple_sites -c 
./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator diff --git a/tests/modules/gatk4/bedtointervallist/main.nf b/tests/modules/gatk4/bedtointervallist/main.nf index 1ca4be58..2dd72904 100644 --- a/tests/modules/gatk4/bedtointervallist/main.nf +++ b/tests/modules/gatk4/bedtointervallist/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' addParams( options: [:] ) +include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' workflow test_gatk4_bedtointervallist { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/bedtointervallist/nextflow.config b/tests/modules/gatk4/bedtointervallist/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/bedtointervallist/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/bedtointervallist/test.yml b/tests/modules/gatk4/bedtointervallist/test.yml index 83c3a574..3482fa6c 100644 --- a/tests/modules/gatk4/bedtointervallist/test.yml +++ b/tests/modules/gatk4/bedtointervallist/test.yml @@ -1,5 +1,5 @@ - name: gatk4 bedtointervallist test_gatk4_bedtointervallist - command: nextflow run tests/modules/gatk4/bedtointervallist -entry test_gatk4_bedtointervallist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/bedtointervallist -entry test_gatk4_bedtointervallist -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/bedtointervallist/nextflow.config tags: - gatk4 - gatk4/bedtointervallist diff --git a/tests/modules/gatk4/calculatecontamination/main.nf b/tests/modules/gatk4/calculatecontamination/main.nf index f93f66fb..4b659ed3 100644 --- a/tests/modules/gatk4/calculatecontamination/main.nf +++ b/tests/modules/gatk4/calculatecontamination/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' addParams( options: [:] ) +include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' workflow test_gatk4_calculatecontamination_tumor_only { diff --git a/tests/modules/gatk4/calculatecontamination/nextflow.config b/tests/modules/gatk4/calculatecontamination/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/calculatecontamination/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/calculatecontamination/test.yml b/tests/modules/gatk4/calculatecontamination/test.yml index 89d419e0..0c489bff 100644 --- a/tests/modules/gatk4/calculatecontamination/test.yml +++ b/tests/modules/gatk4/calculatecontamination/test.yml @@ -1,5 +1,5 @@ - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_tumor_only - command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_tumor_only -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_tumor_only -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 @@ -8,7 
+8,7 @@ md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_matched_pair - command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 @@ -17,7 +17,7 @@ md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_segmentation - command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 diff --git a/tests/modules/gatk4/createsequencedictionary/main.nf b/tests/modules/gatk4/createsequencedictionary/main.nf index 443d77bc..b304b043 100644 --- a/tests/modules/gatk4/createsequencedictionary/main.nf +++ b/tests/modules/gatk4/createsequencedictionary/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_CREATESEQUENCEDICTIONARY } from '../../../../modules/gatk4/createsequencedictionary/main.nf' addParams( options: [:] ) +include { GATK4_CREATESEQUENCEDICTIONARY } from '../../../../modules/gatk4/createsequencedictionary/main.nf' workflow test_gatk4_createsequencedictionary { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/gatk4/createsequencedictionary/nextflow.config b/tests/modules/gatk4/createsequencedictionary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/createsequencedictionary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/createsequencedictionary/test.yml b/tests/modules/gatk4/createsequencedictionary/test.yml index 7788d16a..134a9d74 100644 --- a/tests/modules/gatk4/createsequencedictionary/test.yml +++ b/tests/modules/gatk4/createsequencedictionary/test.yml @@ -1,5 +1,5 @@ - name: gatk4 createsequencedictionary test_gatk4_createsequencedictionary - command: nextflow run tests/modules/gatk4/createsequencedictionary -entry test_gatk4_createsequencedictionary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/createsequencedictionary -entry test_gatk4_createsequencedictionary -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/createsequencedictionary/nextflow.config tags: - gatk4 - gatk4/createsequencedictionary diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf index 6e5366f5..5e1d1904 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' addParams( options: [suffix:'.pon'] ) +include { 
UNTAR } from '../../../../modules/untar/main.nf' +include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' workflow test_gatk4_createsomaticpanelofnormals { db = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config new file mode 100644 index 00000000..6fda39ec --- /dev/null +++ b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_CREATESOMATICPANELOFNORMALS { + ext.suffix = '.pon' + } + +} diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml index d71059ad..a0e2bf26 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml +++ b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml @@ -1,5 +1,5 @@ - name: gatk4 createsomaticpanelofnormals test_gatk4_createsomaticpanelofnormals - command: nextflow run tests/modules/gatk4/createsomaticpanelofnormals -entry test_gatk4_createsomaticpanelofnormals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/createsomaticpanelofnormals -entry test_gatk4_createsomaticpanelofnormals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config tags: - gatk4 - gatk4/createsomaticpanelofnormals diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/main.nf b/tests/modules/gatk4/estimatelibrarycomplexity/main.nf index 72772318..398a6c79 100644 --- a/tests/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/tests/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../../modules/gatk4/estimatelibrarycomplexity/main.nf' addParams( options: [:] ) +include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../../modules/gatk4/estimatelibrarycomplexity/main.nf' workflow test_gatk4_estimatelibrarycomplexity { diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config b/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml index ca949c00..a33e4ec1 100644 --- a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml +++ b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml @@ -1,5 +1,5 @@ - name: gatk4 estimatelibrarycomplexity test_gatk4_estimatelibrarycomplexity - command: nextflow run tests/modules/gatk4/estimatelibrarycomplexity -entry test_gatk4_estimatelibrarycomplexity -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/estimatelibrarycomplexity -entry test_gatk4_estimatelibrarycomplexity -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config tags: - gatk4/estimatelibrarycomplexity - gatk4 diff --git a/tests/modules/gatk4/fastqtosam/main.nf b/tests/modules/gatk4/fastqtosam/main.nf index 64694d9f..4f53c791 100644 --- 
a/tests/modules/gatk4/fastqtosam/main.nf +++ b/tests/modules/gatk4/fastqtosam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_FASTQTOSAM } from '../../../../modules/gatk4/fastqtosam/main.nf' addParams( options: [:] ) +include { GATK4_FASTQTOSAM } from '../../../../modules/gatk4/fastqtosam/main.nf' workflow test_gatk4_fastqtosam_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/gatk4/fastqtosam/nextflow.config b/tests/modules/gatk4/fastqtosam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/fastqtosam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/fastqtosam/test.yml b/tests/modules/gatk4/fastqtosam/test.yml index b576075a..d5d23f94 100644 --- a/tests/modules/gatk4/fastqtosam/test.yml +++ b/tests/modules/gatk4/fastqtosam/test.yml @@ -1,5 +1,5 @@ - name: gatk4 fastqtosam test_gatk4_fastqtosam_single_end - command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_single_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/fastqtosam/nextflow.config tags: - gatk4/fastqtosam - gatk4 @@ -8,7 +8,7 @@ md5sum: 0a0d308b219837977b8df9daa26db7de - name: gatk4 fastqtosam test_gatk4_fastqtosam_paired_end - command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/fastqtosam/nextflow.config tags: - gatk4 - gatk4/fastqtosam diff --git a/tests/modules/gatk4/filtermutectcalls/main.nf b/tests/modules/gatk4/filtermutectcalls/main.nf index 5b2938e8..fa0acff9 100644 --- a/tests/modules/gatk4/filtermutectcalls/main.nf +++ b/tests/modules/gatk4/filtermutectcalls/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutectcalls/main.nf' addParams( options: [suffix:'.filtered'] ) +include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutectcalls/main.nf' workflow test_gatk4_filtermutectcalls_base { diff --git a/tests/modules/gatk4/filtermutectcalls/nextflow.config b/tests/modules/gatk4/filtermutectcalls/nextflow.config new file mode 100644 index 00000000..c830fdc6 --- /dev/null +++ b/tests/modules/gatk4/filtermutectcalls/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_FILTERMUTECTCALLS { + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/gatk4/filtermutectcalls/test.yml b/tests/modules/gatk4/filtermutectcalls/test.yml index d5b97d36..72504e66 100644 --- a/tests/modules/gatk4/filtermutectcalls/test.yml +++ b/tests/modules/gatk4/filtermutectcalls/test.yml @@ -1,5 +1,5 @@ - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_base - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_base -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - 
gatk4 - gatk4/filtermutectcalls @@ -11,7 +11,7 @@ md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_with_files - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - gatk4 - gatk4/filtermutectcalls @@ -23,7 +23,7 @@ md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_use_val - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - gatk4 - gatk4/filtermutectcalls diff --git a/tests/modules/gatk4/genomicsdbimport/main.nf b/tests/modules/gatk4/genomicsdbimport/main.nf index aff3973d..417a08a4 100644 --- a/tests/modules/gatk4/genomicsdbimport/main.nf +++ b/tests/modules/gatk4/genomicsdbimport/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimport/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimport/main.nf' workflow test_gatk4_genomicsdbimport_create_genomicsdb { diff --git a/tests/modules/gatk4/genomicsdbimport/nextflow.config b/tests/modules/gatk4/genomicsdbimport/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/genomicsdbimport/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/genomicsdbimport/test.yml b/tests/modules/gatk4/genomicsdbimport/test.yml index 5fe2b49b..94a1a35e 100644 --- a/tests/modules/gatk4/genomicsdbimport/test.yml +++ b/tests/modules/gatk4/genomicsdbimport/test.yml @@ -1,5 +1,5 @@ - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_create_genomicsdb - command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_create_genomicsdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_create_genomicsdb -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config tags: - gatk4/genomicsdbimport - gatk4 @@ -20,7 +20,7 @@ md5sum: 18d3f68bd2cb6f4474990507ff95017a - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_get_intervalslist - command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_get_intervalslist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_get_intervalslist -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config tags: - gatk4/genomicsdbimport - gatk4 @@ -29,7 +29,7 @@ md5sum: 4c85812ac15fc1cd29711a851d23c0bf - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_update_genomicsdb - command: nextflow run 
tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_update_genomicsdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_update_genomicsdb -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config tags: - gatk4/genomicsdbimport - gatk4 diff --git a/tests/modules/gatk4/genotypegvcfs/main.nf b/tests/modules/gatk4/genotypegvcfs/main.nf index 0b555180..208faf8b 100644 --- a/tests/modules/gatk4/genotypegvcfs/main.nf +++ b/tests/modules/gatk4/genotypegvcfs/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { GATK4_GENOTYPEGVCFS } from '../../../../modules/gatk4/genotypegvcfs/main.nf' addParams( options: [suffix:'.genotyped'] ) -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) +include { GATK4_GENOTYPEGVCFS } from '../../../../modules/gatk4/genotypegvcfs/main.nf' +include { UNTAR } from '../../../../modules/untar/main.nf' // Basic parameters with uncompressed VCF input workflow test_gatk4_genotypegvcfs_vcf_input { diff --git a/tests/modules/gatk4/genotypegvcfs/nextflow.config b/tests/modules/gatk4/genotypegvcfs/nextflow.config new file mode 100644 index 00000000..aaa704da --- /dev/null +++ b/tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_GENOTYPEGVCFS { + ext.suffix = '.genotyped' + } + +} diff --git a/tests/modules/gatk4/genotypegvcfs/test.yml b/tests/modules/gatk4/genotypegvcfs/test.yml index ad39a48d..45201af2 100644 --- a/tests/modules/gatk4/genotypegvcfs/test.yml +++ b/tests/modules/gatk4/genotypegvcfs/test.yml @@ -1,5 +1,5 @@ - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_vcf_input - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_vcf_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_vcf_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -8,7 +8,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -17,7 +17,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -26,7 +26,7 @@ contains: 
['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_intervals - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -35,7 +35,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -44,7 +44,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -53,7 +53,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -62,7 +62,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_intervals - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -71,7 +71,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs diff --git a/tests/modules/gatk4/getpileupsummaries/main.nf b/tests/modules/gatk4/getpileupsummaries/main.nf index 66ee4990..52f3bdec 100644 --- a/tests/modules/gatk4/getpileupsummaries/main.nf +++ b/tests/modules/gatk4/getpileupsummaries/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_GETPILEUPSUMMARIES } from '../../../../modules/gatk4/getpileupsummaries/main.nf' addParams( options: [:] ) +include { GATK4_GETPILEUPSUMMARIES } from '../../../../modules/gatk4/getpileupsummaries/main.nf' workflow test_gatk4_getpileupsummaries_just_variants { diff --git a/tests/modules/gatk4/getpileupsummaries/nextflow.config b/tests/modules/gatk4/getpileupsummaries/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/getpileupsummaries/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/getpileupsummaries/test.yml b/tests/modules/gatk4/getpileupsummaries/test.yml index 6c5e1f84..3211a8fe 100644 --- a/tests/modules/gatk4/getpileupsummaries/test.yml +++ b/tests/modules/gatk4/getpileupsummaries/test.yml @@ -1,5 +1,5 @@ - name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_just_variants - command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_just_variants -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_just_variants -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/getpileupsummaries/nextflow.config tags: - gatk4 - gatk4/getpileupsummaries @@ -8,7 +8,7 @@ md5sum: 0d19674bef2ff0700d5b02b3463dd210 - name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_separate_sites - command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/getpileupsummaries/nextflow.config tags: - gatk4 - gatk4/getpileupsummaries diff --git a/tests/modules/gatk4/haplotypecaller/main.nf b/tests/modules/gatk4/haplotypecaller/main.nf index fd5f30fa..dc6bec67 100644 --- a/tests/modules/gatk4/haplotypecaller/main.nf +++ b/tests/modules/gatk4/haplotypecaller/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_HAPLOTYPECALLER } from '../../../../modules/gatk4/haplotypecaller/main.nf' addParams( options: [:] ) +include { GATK4_HAPLOTYPECALLER } from '../../../../modules/gatk4/haplotypecaller/main.nf' workflow test_gatk4_haplotypecaller { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/haplotypecaller/nextflow.config b/tests/modules/gatk4/haplotypecaller/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/haplotypecaller/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/haplotypecaller/test.yml b/tests/modules/gatk4/haplotypecaller/test.yml index 
480ff8f0..31dd23fd 100644 --- a/tests/modules/gatk4/haplotypecaller/test.yml +++ b/tests/modules/gatk4/haplotypecaller/test.yml @@ -1,5 +1,5 @@ - name: gatk4 haplotypecaller test_gatk4_haplotypecaller - command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/haplotypecaller/nextflow.config tags: - gatk4/haplotypecaller - gatk4 @@ -8,7 +8,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 haplotypecaller test_gatk4_haplotypecaller_cram - command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/haplotypecaller/nextflow.config tags: - gatk4/haplotypecaller - gatk4 @@ -17,7 +17,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 haplotypecaller test_gatk4_haplotypecaller_intervals_dbsnp - command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_intervals_dbsnp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_intervals_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/haplotypecaller/nextflow.config tags: - gatk4/haplotypecaller - gatk4 diff --git a/tests/modules/gatk4/indexfeaturefile/main.nf b/tests/modules/gatk4/indexfeaturefile/main.nf index e523606a..f61b57fc 100644 --- a/tests/modules/gatk4/indexfeaturefile/main.nf +++ b/tests/modules/gatk4/indexfeaturefile/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_INDEXFEATUREFILE } from '../../../../modules/gatk4/indexfeaturefile/main.nf' addParams( options: [:] ) +include { GATK4_INDEXFEATUREFILE } from '../../../../modules/gatk4/indexfeaturefile/main.nf' workflow test_gatk4_indexfeaturefile_bed { diff --git a/tests/modules/gatk4/indexfeaturefile/nextflow.config b/tests/modules/gatk4/indexfeaturefile/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/indexfeaturefile/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/indexfeaturefile/test.yml b/tests/modules/gatk4/indexfeaturefile/test.yml index 5883695a..c524e462 100644 --- a/tests/modules/gatk4/indexfeaturefile/test.yml +++ b/tests/modules/gatk4/indexfeaturefile/test.yml @@ -2,7 +2,7 @@ # a) the path to the file is embedded inside it, # b) the file is binary so we can't check for text inside it. 
- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed - command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config tags: - gatk4 - gatk4/indexfeaturefile @@ -10,7 +10,7 @@ - path: output/gatk4/genome.bed.idx - name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed_gz - command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed_gz -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config tags: - gatk4 - gatk4/indexfeaturefile @@ -22,7 +22,7 @@ # a) the path to the file is embedded inside it, # b) the file is binary so we can't check for text inside it. - name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf - command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config tags: - gatk4 - gatk4/indexfeaturefile @@ -30,7 +30,7 @@ - path: output/gatk4/test.genome.vcf.idx - name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf_gz - command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf_gz -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config tags: - gatk4 - gatk4/indexfeaturefile diff --git a/tests/modules/gatk4/intervallisttools/main.nf b/tests/modules/gatk4/intervallisttools/main.nf index 59be74e8..535923fb 100644 --- a/tests/modules/gatk4/intervallisttools/main.nf +++ b/tests/modules/gatk4/intervallisttools/main.nf @@ -2,15 +2,17 @@ nextflow.enable.dsl = 2 -test_options = ['args': '--SCATTER_COUNT 6 --SUBDIVISION_MODE BALANCING_WITHOUT_INTERVAL_SUBDIVISION_WITH_OVERFLOW --UNIQUE true --SORT true'] -include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' addParams( options: [:] ) -include { GATK4_INTERVALLISTTOOLS as INTERVALLISTTOOLS } from '../../../../modules/gatk4/intervallisttools/main.nf' addParams( options: test_options ) +include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' +include { GATK4_INTERVALLISTTOOLS } from '../../../../modules/gatk4/intervallisttools/main.nf' workflow test_gatk4_intervallisttools { - input = [ [ id:'test' ], [ file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) ]] + input = [ + [ id:'test' ], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) GATK4_BEDTOINTERVALLIST ( input, dict ) - INTERVALLISTTOOLS ( GATK4_BEDTOINTERVALLIST.out.interval_list ) + GATK4_INTERVALLISTTOOLS ( GATK4_BEDTOINTERVALLIST.out.interval_list ) } diff --git a/tests/modules/gatk4/intervallisttools/nextflow.config b/tests/modules/gatk4/intervallisttools/nextflow.config new file mode 100644 index 
00000000..b751ad9b --- /dev/null +++ b/tests/modules/gatk4/intervallisttools/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_INTERVALLISTTOOLS { + ext.args = '--SCATTER_COUNT 6 --SUBDIVISION_MODE BALANCING_WITHOUT_INTERVAL_SUBDIVISION_WITH_OVERFLOW --UNIQUE true --SORT true' + } + +} diff --git a/tests/modules/gatk4/intervallisttools/test.yml b/tests/modules/gatk4/intervallisttools/test.yml index da3e6172..c9cb23b8 100644 --- a/tests/modules/gatk4/intervallisttools/test.yml +++ b/tests/modules/gatk4/intervallisttools/test.yml @@ -1,16 +1,16 @@ - name: gatk4 intervallisttools test_gatk4_intervallisttools - command: nextflow run tests/modules/gatk4/intervallisttools -entry test_gatk4_intervallisttools -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/intervallisttools -entry test_gatk4_intervallisttools -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/intervallisttools/nextflow.config tags: - gatk4 - gatk4/intervallisttools files: - path: output/gatk4/test.interval_list md5sum: e51101c9357fb2d59fd30e370eefa39c - - path: output/intervallisttools/test_split/temp_0001_of_6/1scattered.interval_list + - path: output/gatk4/test_split/temp_0001_of_6/1scattered.interval_list md5sum: b8ba8a387200df76a0d1c577626dc265 - - path: output/intervallisttools/test_split/temp_0002_of_6/2scattered.interval_list + - path: output/gatk4/test_split/temp_0002_of_6/2scattered.interval_list md5sum: 0728d164666d9264ef442a493e008dee - - path: output/intervallisttools/test_split/temp_0003_of_6/3scattered.interval_list + - path: output/gatk4/test_split/temp_0003_of_6/3scattered.interval_list md5sum: 55da0f3c69504148f4e7002a0e072cfe - - path: output/intervallisttools/test_split/temp_0004_of_6/4scattered.interval_list + - path: output/gatk4/test_split/temp_0004_of_6/4scattered.interval_list md5sum: d29ca4447f32547f2936567fa902796a diff --git a/tests/modules/gatk4/learnreadorientationmodel/main.nf b/tests/modules/gatk4/learnreadorientationmodel/main.nf index 1a71873e..dc6e8b89 100644 --- a/tests/modules/gatk4/learnreadorientationmodel/main.nf +++ b/tests/modules/gatk4/learnreadorientationmodel/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_LEARNREADORIENTATIONMODEL } from '../../../../modules/gatk4/learnreadorientationmodel/main.nf' addParams( options: [suffix:'.artifact-prior'] ) +include { GATK4_LEARNREADORIENTATIONMODEL } from '../../../../modules/gatk4/learnreadorientationmodel/main.nf' workflow test_gatk4_learnreadorientationmodel { diff --git a/tests/modules/gatk4/learnreadorientationmodel/nextflow.config b/tests/modules/gatk4/learnreadorientationmodel/nextflow.config new file mode 100644 index 00000000..3a74623a --- /dev/null +++ b/tests/modules/gatk4/learnreadorientationmodel/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_LEARNREADORIENTATIONMODEL { + ext.suffix = '.artifact-prior' + } + +} diff --git a/tests/modules/gatk4/learnreadorientationmodel/test.yml b/tests/modules/gatk4/learnreadorientationmodel/test.yml index 6e999fa6..b88df15f 100644 --- a/tests/modules/gatk4/learnreadorientationmodel/test.yml +++ b/tests/modules/gatk4/learnreadorientationmodel/test.yml @@ -1,5 +1,5 @@ - name: gatk4 learnreadorientationmodel test_gatk4_learnreadorientationmodel - command: nextflow run tests/modules/gatk4/learnreadorientationmodel 
-entry test_gatk4_learnreadorientationmodel -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/learnreadorientationmodel -entry test_gatk4_learnreadorientationmodel -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/learnreadorientationmodel/nextflow.config tags: - gatk4 - gatk4/learnreadorientationmodel diff --git a/tests/modules/gatk4/markduplicates/main.nf b/tests/modules/gatk4/markduplicates/main.nf index f80c1bd5..90fd866f 100644 --- a/tests/modules/gatk4/markduplicates/main.nf +++ b/tests/modules/gatk4/markduplicates/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_MARKDUPLICATES } from '../../../../modules/gatk4/markduplicates/main.nf' addParams( options: [:] ) +include { GATK4_MARKDUPLICATES } from '../../../../modules/gatk4/markduplicates/main.nf' workflow test_gatk4_markduplicates { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/gatk4/markduplicates/nextflow.config b/tests/modules/gatk4/markduplicates/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/markduplicates/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/markduplicates/test.yml b/tests/modules/gatk4/markduplicates/test.yml index f4345bc4..0d4c7393 100644 --- a/tests/modules/gatk4/markduplicates/test.yml +++ b/tests/modules/gatk4/markduplicates/test.yml @@ -1,5 +1,5 @@ - name: gatk4 markduplicates test_gatk4_markduplicates - command: nextflow run tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/markduplicates/nextflow.config tags: - gatk4/markduplicates - gatk4 @@ -11,7 +11,7 @@ - path: output/gatk4/test.metrics - name: gatk4 markduplicates test_gatk4_markduplicates_multiple_bams - command: nextflow run tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates_multiple_bams -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates_multiple_bams -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/markduplicates/nextflow.config tags: - gatk4/markduplicates - gatk4 diff --git a/tests/modules/gatk4/mergebamalignment/main.nf b/tests/modules/gatk4/mergebamalignment/main.nf index 745113ae..59bd833b 100644 --- a/tests/modules/gatk4/mergebamalignment/main.nf +++ b/tests/modules/gatk4/mergebamalignment/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_MERGEBAMALIGNMENT } from '../../../../modules/gatk4/mergebamalignment/main.nf' addParams( options: [:] ) +include { GATK4_MERGEBAMALIGNMENT } from '../../../../modules/gatk4/mergebamalignment/main.nf' workflow test_gatk4_mergebamalignment { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/mergebamalignment/nextflow.config b/tests/modules/gatk4/mergebamalignment/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/mergebamalignment/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/mergebamalignment/test.yml b/tests/modules/gatk4/mergebamalignment/test.yml index 4fb98e3d..5e1ab8d5 100644 --- 
a/tests/modules/gatk4/mergebamalignment/test.yml +++ b/tests/modules/gatk4/mergebamalignment/test.yml @@ -1,5 +1,5 @@ - name: gatk4 mergebamalignment test_gatk4_mergebamalignment - command: nextflow run tests/modules/gatk4/mergebamalignment -entry test_gatk4_mergebamalignment -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mergebamalignment -entry test_gatk4_mergebamalignment -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergebamalignment/nextflow.config tags: - gatk4 - gatk4/mergebamalignment diff --git a/tests/modules/gatk4/mergevcfs/main.nf b/tests/modules/gatk4/mergevcfs/main.nf index 5da894ab..fa09d758 100644 --- a/tests/modules/gatk4/mergevcfs/main.nf +++ b/tests/modules/gatk4/mergevcfs/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_MERGEVCFS } from '../../../../modules/gatk4/mergevcfs/main.nf' addParams( options: [:] ) +include { GATK4_MERGEVCFS } from '../../../../modules/gatk4/mergevcfs/main.nf' workflow test_gatk4_mergevcfs { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/mergevcfs/nextflow.config b/tests/modules/gatk4/mergevcfs/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/mergevcfs/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/mergevcfs/test.yml b/tests/modules/gatk4/mergevcfs/test.yml index 884738b0..3ff2bf93 100644 --- a/tests/modules/gatk4/mergevcfs/test.yml +++ b/tests/modules/gatk4/mergevcfs/test.yml @@ -1,5 +1,5 @@ - name: gatk4 mergevcfs test_gatk4_mergevcfs - command: nextflow run tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergevcfs/nextflow.config tags: - gatk4/mergevcfs - gatk4 @@ -8,7 +8,7 @@ md5sum: 5b289bda88d3a3504f2e19ee8cff177c - name: gatk4 mergevcfs test_gatk4_mergevcfs_refdict - command: nextflow run tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs_refdict -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs_refdict -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergevcfs/nextflow.config tags: - gatk4/mergevcfs - gatk4 diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index e163cf9c..f477a0d7 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { GATK4_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' addParams( options: [:] ) +include { GATK4_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' // used to run with the mitochondria mode setting as this increases sensitivity, allowing for some tumor_normal variants to be detected while the old test data is still in use, will be removed when new test data for sarek is available. 
-include { GATK4_MUTECT2 as GATK4_TEMPFIX_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' addParams( options: [args: '--mitochondria-mode'] ) +include { GATK4_MUTECT2 as GATK4_TEMPFIX_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' workflow test_gatk4_mutect2_tumor_normal_pair { input = [ [ id:'test'], // meta map diff --git a/tests/modules/gatk4/mutect2/nextflow.config b/tests/modules/gatk4/mutect2/nextflow.config new file mode 100644 index 00000000..0966fc15 --- /dev/null +++ b/tests/modules/gatk4/mutect2/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_TEMPFIX_MUTECT2 { + ext.args = '--mitochondria-mode' + } + +} diff --git a/tests/modules/gatk4/mutect2/test.yml b/tests/modules/gatk4/mutect2/test.yml index 031ed072..c6801e04 100644 --- a/tests/modules/gatk4/mutect2/test.yml +++ b/tests/modules/gatk4/mutect2/test.yml @@ -1,5 +1,5 @@ - name: gatk4 mutect2 test_gatk4_mutect2_tumor_normal_pair - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_normal_pair -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_normal_pair -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -11,7 +11,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_tumor_single - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_single -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_single -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -22,7 +22,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_cram_input - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_cram_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_cram_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -33,7 +33,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_generate_pon - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_generate_pon -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_generate_pon -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -44,7 +44,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_mitochondria - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_mitochondria -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_mitochondria -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 diff --git a/tests/modules/gatk4/revertsam/main.nf b/tests/modules/gatk4/revertsam/main.nf index df127c9b..ab5dddee 100644 --- a/tests/modules/gatk4/revertsam/main.nf +++ b/tests/modules/gatk4/revertsam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_REVERTSAM } from '../../../../modules/gatk4/revertsam/main.nf' addParams( options: [:] ) +include { GATK4_REVERTSAM } from 
'../../../../modules/gatk4/revertsam/main.nf' workflow test_gatk4_revertsam { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/revertsam/nextflow.config b/tests/modules/gatk4/revertsam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/revertsam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/revertsam/test.yml b/tests/modules/gatk4/revertsam/test.yml index c65d3666..4199b118 100644 --- a/tests/modules/gatk4/revertsam/test.yml +++ b/tests/modules/gatk4/revertsam/test.yml @@ -1,5 +1,5 @@ - name: gatk4 revertsam test_gatk4_revertsam - command: nextflow run tests/modules/gatk4/revertsam -entry test_gatk4_revertsam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/revertsam -entry test_gatk4_revertsam -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/revertsam/nextflow.config tags: - gatk4 - gatk4/revertsam diff --git a/tests/modules/gatk4/samtofastq/main.nf b/tests/modules/gatk4/samtofastq/main.nf index db63a2a5..26a8ce2d 100644 --- a/tests/modules/gatk4/samtofastq/main.nf +++ b/tests/modules/gatk4/samtofastq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_SAMTOFASTQ } from '../../../../modules/gatk4/samtofastq/main.nf' addParams( options: [:] ) +include { GATK4_SAMTOFASTQ } from '../../../../modules/gatk4/samtofastq/main.nf' workflow test_gatk4_samtofastq_single_end { input = [ [ id:'test', single_end: true ], // meta map diff --git a/tests/modules/gatk4/samtofastq/nextflow.config b/tests/modules/gatk4/samtofastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/samtofastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/samtofastq/test.yml b/tests/modules/gatk4/samtofastq/test.yml index 3d877d2f..66d3ee4c 100644 --- a/tests/modules/gatk4/samtofastq/test.yml +++ b/tests/modules/gatk4/samtofastq/test.yml @@ -1,5 +1,5 @@ - name: gatk4 samtofastq test_gatk4_samtofastq_single_end - command: nextflow run tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_single_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/samtofastq/nextflow.config tags: - gatk4 - gatk4/samtofastq @@ -8,7 +8,7 @@ md5sum: 50ace41d4c24467f24f8b929540a7797 - name: gatk4 samtofastq test_gatk4_samtofastq_paired_end - command: nextflow run tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/samtofastq/nextflow.config tags: - gatk4 - gatk4/samtofastq diff --git a/tests/modules/gatk4/splitncigarreads/main.nf b/tests/modules/gatk4/splitncigarreads/main.nf index 0934593f..7e5b7c9a 100644 --- a/tests/modules/gatk4/splitncigarreads/main.nf +++ b/tests/modules/gatk4/splitncigarreads/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_SPLITNCIGARREADS } from '../../../../modules/gatk4/splitncigarreads/main.nf' addParams( options: [:] ) +include { GATK4_SPLITNCIGARREADS } from '../../../../modules/gatk4/splitncigarreads/main.nf' 
workflow test_gatk4_splitncigarreads { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/splitncigarreads/nextflow.config b/tests/modules/gatk4/splitncigarreads/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/splitncigarreads/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/splitncigarreads/test.yml b/tests/modules/gatk4/splitncigarreads/test.yml index 146cd329..1ba8c5cd 100644 --- a/tests/modules/gatk4/splitncigarreads/test.yml +++ b/tests/modules/gatk4/splitncigarreads/test.yml @@ -1,5 +1,5 @@ - name: gatk4 splitncigarreads test_gatk4_splitncigarreads - command: nextflow run tests/modules/gatk4/splitncigarreads -entry test_gatk4_splitncigarreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/splitncigarreads -entry test_gatk4_splitncigarreads -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/splitncigarreads/nextflow.config tags: - gatk4 - gatk4/splitncigarreads diff --git a/tests/modules/gatk4/variantfiltration/main.nf b/tests/modules/gatk4/variantfiltration/main.nf index 67c9daec..221c469a 100644 --- a/tests/modules/gatk4/variantfiltration/main.nf +++ b/tests/modules/gatk4/variantfiltration/main.nf @@ -2,35 +2,38 @@ nextflow.enable.dsl = 2 -test_options = ['args': '--filter-name "test_filter" --filter-expression "MQ0 > 0"', 'suffix': '.filtered'] -include { GATK4_VARIANTFILTRATION } from '../../../../modules/gatk4/variantfiltration/main.nf' addParams( options: test_options ) +include { GATK4_VARIANTFILTRATION } from '../../../../modules/gatk4/variantfiltration/main.nf' // Basic parameters with uncompressed VCF input workflow test_gatk4_variantfiltration_vcf_input { - input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) + ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fasta_index = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fasta_dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_VARIANTFILTRATION ( input, fasta, fastaIndex, fastaDict ) + GATK4_VARIANTFILTRATION ( input, fasta, fasta_index, fasta_dict ) } // Basic parameters with compressed VCF input workflow test_gatk4_variantfiltration_gz_input { - input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + 
file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) + ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fasta_index = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fasta_dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_VARIANTFILTRATION ( input, fasta, fastaIndex, fastaDict ) + GATK4_VARIANTFILTRATION ( input, fasta, fasta_index, fasta_dict ) } diff --git a/tests/modules/gatk4/variantfiltration/nextflow.config b/tests/modules/gatk4/variantfiltration/nextflow.config new file mode 100644 index 00000000..ff2feb9c --- /dev/null +++ b/tests/modules/gatk4/variantfiltration/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_VARIANTFILTRATION { + ext.args = "--filter-name \'test_filter\' --filter-expression \'MQ0 > 0\'" + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/gatk4/variantfiltration/test.yml b/tests/modules/gatk4/variantfiltration/test.yml index e3177cfc..b5da0e5c 100644 --- a/tests/modules/gatk4/variantfiltration/test.yml +++ b/tests/modules/gatk4/variantfiltration/test.yml @@ -1,5 +1,5 @@ - name: gatk4 variantfiltration test_gatk4_variantfiltration_vcf_input - command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_vcf_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_vcf_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/variantfiltration/nextflow.config tags: - gatk4/variantfiltration - gatk4 @@ -9,7 +9,7 @@ - path: output/gatk4/test.filtered.vcf.gz.tbi - name: gatk4 variantfiltration test_gatk4_variantfiltration_gz_input - command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_gz_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_gz_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/variantfiltration/nextflow.config tags: - gatk4/variantfiltration - gatk4 diff --git a/tests/modules/genmap/index/main.nf b/tests/modules/genmap/index/main.nf index 358ebb35..06106640 100644 --- a/tests/modules/genmap/index/main.nf +++ b/tests/modules/genmap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' addParams( options: [publish_dir:'genmap'] ) +include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' workflow test_genmap_index { diff --git a/tests/modules/genmap/index/nextflow.config b/tests/modules/genmap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/genmap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/genmap/index/test.yml b/tests/modules/genmap/index/test.yml index c5078014..ce2098ce 100644 --- 
a/tests/modules/genmap/index/test.yml +++ b/tests/modules/genmap/index/test.yml @@ -1,5 +1,5 @@ - name: genmap index test_genmap_index - command: nextflow run tests/modules/genmap/index -entry test_genmap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genmap/index -entry test_genmap_index -c ./tests/config/nextflow.config -c ./tests/modules/genmap/index/nextflow.config tags: - genmap - genmap/index diff --git a/tests/modules/genmap/mappability/main.nf b/tests/modules/genmap/mappability/main.nf index 636ec0e4..eb6a34fa 100644 --- a/tests/modules/genmap/mappability/main.nf +++ b/tests/modules/genmap/mappability/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' addParams( options: [:] ) -include { GENMAP_MAPPABILITY } from '../../../../modules/genmap/mappability/main.nf' addParams( options: [args : '-K 50 -E 2 -w -t -bg'] ) +include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' +include { GENMAP_MAPPABILITY } from '../../../../modules/genmap/mappability/main.nf' workflow test_genmap_map { diff --git a/tests/modules/genmap/mappability/nextflow.config b/tests/modules/genmap/mappability/nextflow.config new file mode 100644 index 00000000..6936b9ea --- /dev/null +++ b/tests/modules/genmap/mappability/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GENMAP_MAPPABILITY { + ext.args = '-K 50 -E 2 -w -t -bg' + } + +} diff --git a/tests/modules/genmap/mappability/test.yml b/tests/modules/genmap/mappability/test.yml index 29a12de1..94c1d501 100644 --- a/tests/modules/genmap/mappability/test.yml +++ b/tests/modules/genmap/mappability/test.yml @@ -1,5 +1,5 @@ - name: genmap mappability test_genmap_map - command: nextflow run tests/modules/genmap/mappability -entry test_genmap_map -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genmap/mappability -entry test_genmap_map -c ./tests/config/nextflow.config -c ./tests/modules/genmap/mappability/nextflow.config tags: - genmap - genmap/mappability diff --git a/tests/modules/genrich/main.nf b/tests/modules/genrich/main.nf index aa1a2d49..34db589e 100644 --- a/tests/modules/genrich/main.nf +++ b/tests/modules/genrich/main.nf @@ -2,10 +2,10 @@ nextflow.enable.dsl = 2 -include { GENRICH } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-p 0.1"] ) -include { GENRICH as GENRICH_CTRL } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-p 0.9"] ) -include { GENRICH as GENRICH_ALL } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-r -p 0.1"] ) -include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-j -p 0.1"] ) +include { GENRICH } from '../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_CTRL } from '../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_ALL } from '../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' workflow test_genrich { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/genrich/nextflow.config b/tests/modules/genrich/nextflow.config new file mode 100644 index 00000000..8f79d7be --- /dev/null +++ b/tests/modules/genrich/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + 
+ withName: GENRICH { + ext.args = '-p 0.1' + } + + withName: GENRICH_CTRL { + ext.args = '-p 0.9' + } + + withName: GENRICH_ALL { + ext.args = '-r -p 0.1' + } + + withName: GENRICH_ATACSEQ { + ext.args = '-j -p 0.1' + } + +} diff --git a/tests/modules/genrich/test.yml b/tests/modules/genrich/test.yml index 63bf2927..972335c4 100644 --- a/tests/modules/genrich/test.yml +++ b/tests/modules/genrich/test.yml @@ -1,5 +1,5 @@ - name: genrich test_genrich - command: nextflow run tests/modules/genrich -entry test_genrich -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -7,7 +7,7 @@ md5sum: 6afabdd3f691c7c84c66ff8a23984681 - name: genrich test_genrich_ctrl - command: nextflow run tests/modules/genrich -entry test_genrich_ctrl -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_ctrl -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -15,7 +15,7 @@ md5sum: 2fcc392360b317f5ebee88cdbc149e05 - name: genrich test_genrich_all_outputs - command: nextflow run tests/modules/genrich -entry test_genrich_all_outputs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_all_outputs -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -31,7 +31,7 @@ md5sum: b14feef34b6d2379a173a734ca963cde - name: genrich test_genrich_blacklist - command: nextflow run tests/modules/genrich -entry test_genrich_blacklist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_blacklist -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -39,7 +39,7 @@ md5sum: 6afabdd3f691c7c84c66ff8a23984681 - name: genrich test_genrich_atacseq - command: nextflow run tests/modules/genrich -entry test_genrich_atacseq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_atacseq -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: diff --git a/tests/modules/gffread/main.nf b/tests/modules/gffread/main.nf index 87e95275..6ab7922b 100644 --- a/tests/modules/gffread/main.nf +++ b/tests/modules/gffread/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GFFREAD } from '../../../modules/gffread/main.nf' addParams( options: [suffix: '.out'] ) +include { GFFREAD } from '../../../modules/gffread/main.nf' workflow test_gffread { input = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) diff --git a/tests/modules/gffread/nextflow.config b/tests/modules/gffread/nextflow.config new file mode 100644 index 00000000..00c052f5 --- /dev/null +++ b/tests/modules/gffread/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GFFREAD { + ext.suffix = '.out' + } + +} diff --git a/tests/modules/gffread/test.yml b/tests/modules/gffread/test.yml index 48096f1e..c5a16132 100644 --- a/tests/modules/gffread/test.yml +++ b/tests/modules/gffread/test.yml @@ -1,5 +1,5 @@ - name: gffread - command: nextflow run ./tests/modules/gffread/ -entry test_gffread -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gffread/ -entry test_gffread -c ./tests/config/nextflow.config -c 
./tests/modules/gffread/nextflow.config tags: - gffread files: diff --git a/tests/modules/glnexus/main.nf b/tests/modules/glnexus/main.nf index 2a79b2fa..aeb7c7e2 100644 --- a/tests/modules/glnexus/main.nf +++ b/tests/modules/glnexus/main.nf @@ -2,12 +2,16 @@ nextflow.enable.dsl = 2 -include { GLNEXUS } from '../../../modules/glnexus/main.nf' addParams( options: [:] ) +include { GLNEXUS } from '../../../modules/glnexus/main.nf' workflow test_glnexus { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test' ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'], checkIfExists: true) + ] + ] + GLNEXUS ( input ) } diff --git a/tests/modules/glnexus/nextflow.config b/tests/modules/glnexus/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/glnexus/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/glnexus/test.yml b/tests/modules/glnexus/test.yml index c7b255ee..bfca4529 100644 --- a/tests/modules/glnexus/test.yml +++ b/tests/modules/glnexus/test.yml @@ -1,7 +1,7 @@ - name: glnexus test_glnexus - command: nextflow run tests/modules/glnexus -entry test_glnexus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/glnexus -entry test_glnexus -c ./tests/config/nextflow.config -c ./tests/modules/glnexus/nextflow.config tags: - glnexus files: - path: output/glnexus/test.bcf - md5sum: 33ac8c9f3ff54e6a23177ba94a449173 + md5sum: 62b2cea9c1b92ac63645cb031eea46fc diff --git a/tests/modules/graphmap2/align/main.nf b/tests/modules/graphmap2/align/main.nf index 0cd885ab..96b95166 100644 --- a/tests/modules/graphmap2/align/main.nf +++ b/tests/modules/graphmap2/align/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' addParams( options: [:] ) -include { GRAPHMAP2_ALIGN } from '../../../../modules/graphmap2/align/main.nf' addParams( options: [:] ) +include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' +include { GRAPHMAP2_ALIGN } from '../../../../modules/graphmap2/align/main.nf' workflow test_graphmap2_align { diff --git a/tests/modules/graphmap2/align/nextflow.config b/tests/modules/graphmap2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/graphmap2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/graphmap2/align/test.yml b/tests/modules/graphmap2/align/test.yml index 7e90b8d4..90e52dd1 100644 --- a/tests/modules/graphmap2/align/test.yml +++ b/tests/modules/graphmap2/align/test.yml @@ -1,5 +1,5 @@ - name: graphmap2 align - command: nextflow run ./tests/modules/graphmap2/align -entry test_graphmap2_align -c tests/config/nextflow.config + command: nextflow run ./tests/modules/graphmap2/align -entry test_graphmap2_align -c ./tests/config/nextflow.config -c ./tests/modules/graphmap2/align/nextflow.config tags: - graphmap2 - graphmap2/align diff --git a/tests/modules/graphmap2/index/main.nf 
b/tests/modules/graphmap2/index/main.nf index 66347f06..3c449c6b 100644 --- a/tests/modules/graphmap2/index/main.nf +++ b/tests/modules/graphmap2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' addParams( options: [:] ) +include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' workflow test_graphmap2_index { diff --git a/tests/modules/graphmap2/index/nextflow.config b/tests/modules/graphmap2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/graphmap2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/graphmap2/index/test.yml b/tests/modules/graphmap2/index/test.yml index 15042e97..0bff487e 100644 --- a/tests/modules/graphmap2/index/test.yml +++ b/tests/modules/graphmap2/index/test.yml @@ -1,5 +1,5 @@ - name: graphmap2 index - command: nextflow run ./tests/modules/graphmap2/index -entry test_graphmap2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/graphmap2/index -entry test_graphmap2_index -c ./tests/config/nextflow.config -c ./tests/modules/graphmap2/index/nextflow.config tags: - graphmap2 - graphmap2/index diff --git a/tests/modules/gstama/collapse/main.nf b/tests/modules/gstama/collapse/main.nf index 70b3c741..3eb97767 100644 --- a/tests/modules/gstama/collapse/main.nf +++ b/tests/modules/gstama/collapse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GSTAMA_COLLAPSE } from '../../../../modules/gstama/collapse/main.nf' addParams( options: [ args:"-x capped -b BAM", suffix:'_tc' ] ) +include { GSTAMA_COLLAPSE } from '../../../../modules/gstama/collapse/main.nf' workflow test_gstama_collapse { diff --git a/tests/modules/gstama/collapse/nextflow.config b/tests/modules/gstama/collapse/nextflow.config new file mode 100644 index 00000000..0455c8b2 --- /dev/null +++ b/tests/modules/gstama/collapse/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GSTAMA_COLLAPSE { + ext.args = '-x capped -b BAM' + ext.suffix = '_tc' + } + +} diff --git a/tests/modules/gstama/collapse/test.yml b/tests/modules/gstama/collapse/test.yml index 3815a156..50d3775e 100644 --- a/tests/modules/gstama/collapse/test.yml +++ b/tests/modules/gstama/collapse/test.yml @@ -1,5 +1,5 @@ - name: gstama collapse test_gstama_collapse - command: nextflow run tests/modules/gstama/collapse -entry test_gstama_collapse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gstama/collapse -entry test_gstama_collapse -c ./tests/config/nextflow.config -c ./tests/modules/gstama/collapse/nextflow.config tags: - gstama - gstama/collapse diff --git a/tests/modules/gstama/merge/main.nf b/tests/modules/gstama/merge/main.nf index f9a8e05f..4a9102a2 100644 --- a/tests/modules/gstama/merge/main.nf +++ b/tests/modules/gstama/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GSTAMA_MERGE } from '../../../../modules/gstama/merge/main' addParams( options: [suffix:'_merged'] ) +include { GSTAMA_MERGE } from '../../../../modules/gstama/merge/main' workflow test_gstama_merge { diff --git a/tests/modules/gstama/merge/nextflow.config b/tests/modules/gstama/merge/nextflow.config new file mode 100644 index 00000000..a9c63fcf --- /dev/null +++ 
b/tests/modules/gstama/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GSTAMA_MERGE { + ext.suffix = '_merged' + } + +} diff --git a/tests/modules/gstama/merge/test.yml b/tests/modules/gstama/merge/test.yml index b98e35b6..1db35d15 100644 --- a/tests/modules/gstama/merge/test.yml +++ b/tests/modules/gstama/merge/test.yml @@ -1,5 +1,5 @@ - name: gstama merge test_gstama_merge - command: nextflow run tests/modules/gstama/merge -entry test_gstama_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gstama/merge -entry test_gstama_merge -c ./tests/config/nextflow.config -c ./tests/modules/gstama/merge/nextflow.config tags: - gstama - gstama/merge diff --git a/tests/modules/gtdbtk/classifywf/main.nf b/tests/modules/gtdbtk/classifywf/main.nf index f52b0ccc..1517d7cc 100644 --- a/tests/modules/gtdbtk/classifywf/main.nf +++ b/tests/modules/gtdbtk/classifywf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GTDBTK_CLASSIFYWF } from '../../../../modules/gtdbtk/classifywf/main.nf' addParams( options: [:] ) +include { GTDBTK_CLASSIFYWF } from '../../../../modules/gtdbtk/classifywf/main.nf' process STUB_GTDBTK_DATABASE { output: diff --git a/tests/modules/gtdbtk/classifywf/nextflow.config b/tests/modules/gtdbtk/classifywf/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gtdbtk/classifywf/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gtdbtk/classifywf/test.yml b/tests/modules/gtdbtk/classifywf/test.yml index 6d0f055e..e24f1e17 100644 --- a/tests/modules/gtdbtk/classifywf/test.yml +++ b/tests/modules/gtdbtk/classifywf/test.yml @@ -1,5 +1,5 @@ - name: gtdbtk classifywf - command: nextflow run ./tests/modules/gtdbtk/classifywf -entry test_gtdbtk_classifywf -c tests/config/nextflow.config -stub-run + command: nextflow run ./tests/modules/gtdbtk/classifywf -entry test_gtdbtk_classifywf -c ./tests/config/nextflow.config -stub-run -c ./tests/modules/gtdbtk/classifywf/nextflow.config tags: - gtdbtk - gtdbtk/classifywf diff --git a/tests/modules/gubbins/main.nf b/tests/modules/gubbins/main.nf index 87e164d0..342150b3 100644 --- a/tests/modules/gubbins/main.nf +++ b/tests/modules/gubbins/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GUBBINS } from '../../../modules/gubbins/main.nf' addParams( options: [:] ) +include { GUBBINS } from '../../../modules/gubbins/main.nf' workflow test_gubbins { input = file(params.test_data['sarscov2']['genome']['all_sites_fas'], checkIfExists: true) diff --git a/tests/modules/gubbins/nextflow.config b/tests/modules/gubbins/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gubbins/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gubbins/test.yml b/tests/modules/gubbins/test.yml index 7bc0216b..6c85260d 100644 --- a/tests/modules/gubbins/test.yml +++ b/tests/modules/gubbins/test.yml @@ -1,5 +1,5 @@ - name: gubbins - command: nextflow run ./tests/modules/gubbins -entry test_gubbins -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gubbins -entry test_gubbins -c ./tests/config/nextflow.config -c ./tests/modules/gubbins/nextflow.config tags: - gubbins 
files: diff --git a/tests/modules/gunc/downloaddb/main.nf b/tests/modules/gunc/downloaddb/main.nf index c0321279..3e3126f5 100644 --- a/tests/modules/gunc/downloaddb/main.nf +++ b/tests/modules/gunc/downloaddb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' addParams( options: [:] ) +include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' workflow test_gunc_downloaddb { diff --git a/tests/modules/gunc/downloaddb/nextflow.config b/tests/modules/gunc/downloaddb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunc/downloaddb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunc/downloaddb/test.yml b/tests/modules/gunc/downloaddb/test.yml index d1aafae7..4e1c23f8 100644 --- a/tests/modules/gunc/downloaddb/test.yml +++ b/tests/modules/gunc/downloaddb/test.yml @@ -1,5 +1,5 @@ - name: gunc downloaddb - command: nextflow run ./tests/modules/gunc/downloaddb -entry test_gunc_downloaddb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gunc/downloaddb -entry test_gunc_downloaddb -c ./tests/config/nextflow.config -c ./tests/modules/gunc/downloaddb/nextflow.config tags: - gunc - gunc/downloaddb diff --git a/tests/modules/gunc/run/main.nf b/tests/modules/gunc/run/main.nf index a1a191dc..28ecd35f 100644 --- a/tests/modules/gunc/run/main.nf +++ b/tests/modules/gunc/run/main.nf @@ -2,16 +2,16 @@ nextflow.enable.dsl = 2 -include { GUNC_RUN } from '../../../../modules/gunc/run/main.nf' addParams( options: [:] ) -include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' addParams( options: [:] ) - +include { GUNC_RUN } from '../../../../modules/gunc/run/main.nf' +include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' workflow test_gunc_run { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) ] - - GUNC_DOWNLOADDB('progenomes') + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) + ] + GUNC_DOWNLOADDB ( 'progenomes' ) GUNC_RUN ( input, GUNC_DOWNLOADDB.out.db ) } diff --git a/tests/modules/gunc/run/nextflow.config b/tests/modules/gunc/run/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunc/run/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunc/run/test.yml b/tests/modules/gunc/run/test.yml index d527f37e..5bcef868 100644 --- a/tests/modules/gunc/run/test.yml +++ b/tests/modules/gunc/run/test.yml @@ -1,5 +1,5 @@ - name: gunc run - command: nextflow run ./tests/modules/gunc/run -entry test_gunc_run -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gunc/run -entry test_gunc_run -c ./tests/config/nextflow.config -c ./tests/modules/gunc/run/nextflow.config tags: - gunc - gunc/run diff --git a/tests/modules/gunzip/main.nf b/tests/modules/gunzip/main.nf index 0c23a8cd..3d41a4a2 100644 --- a/tests/modules/gunzip/main.nf +++ b/tests/modules/gunzip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GUNZIP } from '../../../modules/gunzip/main.nf' addParams( options: [:] ) +include { 
GUNZIP } from '../../../modules/gunzip/main.nf' workflow test_gunzip { input = [ [], diff --git a/tests/modules/gunzip/nextflow.config b/tests/modules/gunzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunzip/test.yml b/tests/modules/gunzip/test.yml index 70012b21..70e95d6b 100644 --- a/tests/modules/gunzip/test.yml +++ b/tests/modules/gunzip/test.yml @@ -1,5 +1,5 @@ - name: gunzip - command: nextflow run ./tests/modules/gunzip -entry test_gunzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gunzip -entry test_gunzip -c ./tests/config/nextflow.config -c ./tests/modules/gunzip/nextflow.config tags: - gunzip files: diff --git a/tests/modules/hicap/main.nf b/tests/modules/hicap/main.nf index 3ac9c20b..82c515de 100644 --- a/tests/modules/hicap/main.nf +++ b/tests/modules/hicap/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { HICAP } from '../../../modules/hicap/main.nf' addParams( options: [:] ) +include { HICAP } from '../../../modules/hicap/main.nf' workflow test_hicap { - - input = [ [ id:'test', single_end:false ], // meta map - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/GCF_900478275.fna.gz", checkIfExists: true) ] - + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['haemophilus_influenzae']['genome']['genome_fna_gz'], checkIfExists: true) + ] database_dir = [] model_fp = [] diff --git a/tests/modules/hicap/nextflow.config b/tests/modules/hicap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hicap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hicap/test.yml b/tests/modules/hicap/test.yml index 8c8420fd..0cce28c7 100644 --- a/tests/modules/hicap/test.yml +++ b/tests/modules/hicap/test.yml @@ -1,10 +1,10 @@ - name: hicap test_hicap - command: nextflow run tests/modules/hicap -entry test_hicap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hicap -entry test_hicap -c ./tests/config/nextflow.config -c ./tests/modules/hicap/nextflow.config tags: - hicap files: - - path: output/hicap/GCF_900478275.gbk + - path: output/hicap/genome.gbk md5sum: 562d026956903354ac80721f501335d4 - - path: output/hicap/GCF_900478275.svg + - path: output/hicap/genome.svg md5sum: 4fb94871dd0fdd8b4496049668176631 - - path: output/hicap/GCF_900478275.tsv + - path: output/hicap/genome.tsv diff --git a/tests/modules/hifiasm/main.nf b/tests/modules/hifiasm/main.nf index 30614389..f0e2a0f4 100644 --- a/tests/modules/hifiasm/main.nf +++ b/tests/modules/hifiasm/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HIFIASM } from '../../../modules/hifiasm/main.nf' addParams( options: [args:'-f0'] ) +include { HIFIASM } from '../../../modules/hifiasm/main.nf' /* * Test with long reads only diff --git a/tests/modules/hifiasm/nextflow.config b/tests/modules/hifiasm/nextflow.config new file mode 100644 index 00000000..0994c901 --- /dev/null +++ b/tests/modules/hifiasm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HIFIASM { + ext.args = '-f0' + } + 
+} diff --git a/tests/modules/hifiasm/test.yml b/tests/modules/hifiasm/test.yml index 47d9e38f..f7e3e6ae 100644 --- a/tests/modules/hifiasm/test.yml +++ b/tests/modules/hifiasm/test.yml @@ -1,5 +1,5 @@ - name: hifiasm test_hifiasm_hifi_only - command: nextflow run tests/modules/hifiasm -entry test_hifiasm_hifi_only -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hifiasm -entry test_hifiasm_hifi_only -c ./tests/config/nextflow.config -c ./tests/modules/hifiasm/nextflow.config tags: - hifiasm files: @@ -16,7 +16,7 @@ - path: output/hifiasm/test.asm.ovlp.source.bin - name: hifiasm test_hifiasm_with_parental_reads - command: nextflow run tests/modules/hifiasm -entry test_hifiasm_with_parental_reads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hifiasm -entry test_hifiasm_with_parental_reads -c ./tests/config/nextflow.config -c ./tests/modules/hifiasm/nextflow.config tags: - hifiasm files: diff --git a/tests/modules/hisat2/align/main.nf b/tests/modules/hisat2/align/main.nf index 7bbe3a4b..17b47c93 100644 --- a/tests/modules/hisat2/align/main.nf +++ b/tests/modules/hisat2/align/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] ) -include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' addParams( options: [:] ) -include { HISAT2_ALIGN } from '../../../../modules/hisat2/align/main.nf' addParams( options: [:] ) +include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' +include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' +include { HISAT2_ALIGN } from '../../../../modules/hisat2/align/main.nf' workflow test_hisat2_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ -19,10 +22,13 @@ workflow test_hisat2_align_single_end { } workflow test_hisat2_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) diff --git a/tests/modules/hisat2/align/nextflow.config b/tests/modules/hisat2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hisat2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hisat2/align/test.yml b/tests/modules/hisat2/align/test.yml index 1c6c8ac2..54e263bc 100644 --- 
a/tests/modules/hisat2/align/test.yml +++ b/tests/modules/hisat2/align/test.yml @@ -1,5 +1,5 @@ - name: hisat2 align test_hisat2_align_single_end - command: nextflow run tests/modules/hisat2/align -entry test_hisat2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/align -entry test_hisat2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/align/nextflow.config tags: - hisat2 - hisat2/align @@ -9,25 +9,25 @@ - path: output/hisat2/genome.splice_sites.txt md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/hisat2/test.bam - - path: output/index/hisat2/genome.5.ht2 + - path: output/hisat2/hisat2/genome.5.ht2 md5sum: 91198831aaba993acac1734138c5f173 - - path: output/index/hisat2/genome.7.ht2 + - path: output/hisat2/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - - path: output/index/hisat2/genome.1.ht2 + - path: output/hisat2/hisat2/genome.1.ht2 md5sum: 057cfa8a22b97ee9cff4c8d342498803 - - path: output/index/hisat2/genome.2.ht2 + - path: output/hisat2/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: output/index/hisat2/genome.6.ht2 + - path: output/hisat2/hisat2/genome.6.ht2 md5sum: 265e1284ce85686516fae5d35540994a - - path: output/index/hisat2/genome.3.ht2 + - path: output/hisat2/hisat2/genome.3.ht2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: output/index/hisat2/genome.8.ht2 + - path: output/hisat2/hisat2/genome.8.ht2 md5sum: 33cdeccccebe80329f1fdbee7f5874cb - - path: output/index/hisat2/genome.4.ht2 + - path: output/hisat2/hisat2/genome.4.ht2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - name: hisat2 align test_hisat2_align_paired_end - command: nextflow run tests/modules/hisat2/align -entry test_hisat2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/align -entry test_hisat2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/align/nextflow.config tags: - hisat2 - hisat2/align @@ -37,19 +37,19 @@ - path: output/hisat2/genome.splice_sites.txt md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/hisat2/test.bam - - path: output/index/hisat2/genome.5.ht2 + - path: output/hisat2/hisat2/genome.5.ht2 md5sum: 91198831aaba993acac1734138c5f173 - - path: output/index/hisat2/genome.7.ht2 + - path: output/hisat2/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - - path: output/index/hisat2/genome.1.ht2 + - path: output/hisat2/hisat2/genome.1.ht2 md5sum: 057cfa8a22b97ee9cff4c8d342498803 - - path: output/index/hisat2/genome.2.ht2 + - path: output/hisat2/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: output/index/hisat2/genome.6.ht2 + - path: output/hisat2/hisat2/genome.6.ht2 md5sum: 265e1284ce85686516fae5d35540994a - - path: output/index/hisat2/genome.3.ht2 + - path: output/hisat2/hisat2/genome.3.ht2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: output/index/hisat2/genome.8.ht2 + - path: output/hisat2/hisat2/genome.8.ht2 md5sum: 33cdeccccebe80329f1fdbee7f5874cb - - path: output/index/hisat2/genome.4.ht2 + - path: output/hisat2/hisat2/genome.4.ht2 md5sum: c25be5f8b0378abf7a58c8a880b87626 diff --git a/tests/modules/hisat2/build_test/main.nf b/tests/modules/hisat2/build_test/main.nf index f40f47cc..a0c14dc8 100644 --- a/tests/modules/hisat2/build_test/main.nf +++ b/tests/modules/hisat2/build_test/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] ) -include { 
HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' addParams( options: [:] )
+include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf'
+include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf'
 
 workflow test_hisat2_build {
     fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
diff --git a/tests/modules/hisat2/build_test/nextflow.config b/tests/modules/hisat2/build_test/nextflow.config
new file mode 100644
index 00000000..8730f1c4
--- /dev/null
+++ b/tests/modules/hisat2/build_test/nextflow.config
@@ -0,0 +1,5 @@
+process {
+
+    publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
+
+}
diff --git a/tests/modules/hisat2/build_test/test.yml b/tests/modules/hisat2/build_test/test.yml
index a8bb2390..da5a450c 100644
--- a/tests/modules/hisat2/build_test/test.yml
+++ b/tests/modules/hisat2/build_test/test.yml
@@ -1,24 +1,24 @@
 - name: hisat2 build test_hisat2_build
-  command: nextflow run tests/modules/hisat2/build_test -entry test_hisat2_build -c tests/config/nextflow.config
+  command: nextflow run ./tests/modules/hisat2/build_test -entry test_hisat2_build -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/build_test/nextflow.config
   tags:
     - hisat2
     - hisat2/build
   files:
     - path: output/hisat2/genome.splice_sites.txt
       md5sum: d41d8cd98f00b204e9800998ecf8427e
-    - path: output/index/hisat2/genome.5.ht2
+    - path: output/hisat2/hisat2/genome.5.ht2
       md5sum: 91198831aaba993acac1734138c5f173
-    - path: output/index/hisat2/genome.7.ht2
+    - path: output/hisat2/hisat2/genome.7.ht2
       md5sum: 9013eccd91ad614d7893c739275a394f
-    - path: output/index/hisat2/genome.1.ht2
+    - path: output/hisat2/hisat2/genome.1.ht2
       md5sum: 057cfa8a22b97ee9cff4c8d342498803
-    - path: output/index/hisat2/genome.2.ht2
+    - path: output/hisat2/hisat2/genome.2.ht2
       md5sum: 47b153cd1319abc88dda532462651fcf
-    - path: output/index/hisat2/genome.6.ht2
+    - path: output/hisat2/hisat2/genome.6.ht2
       md5sum: 265e1284ce85686516fae5d35540994a
-    - path: output/index/hisat2/genome.3.ht2
+    - path: output/hisat2/hisat2/genome.3.ht2
       md5sum: 4ed93abba181d8dfab2e303e33114777
-    - path: output/index/hisat2/genome.8.ht2
+    - path: output/hisat2/hisat2/genome.8.ht2
       md5sum: 33cdeccccebe80329f1fdbee7f5874cb
-    - path: output/index/hisat2/genome.4.ht2
+    - path: output/hisat2/hisat2/genome.4.ht2
       md5sum: c25be5f8b0378abf7a58c8a880b87626
diff --git a/tests/modules/hisat2/extractsplicesites/main.nf b/tests/modules/hisat2/extractsplicesites/main.nf
index 5c7e17b9..e947717e 100644
--- a/tests/modules/hisat2/extractsplicesites/main.nf
+++ b/tests/modules/hisat2/extractsplicesites/main.nf
@@ -2,7 +2,7 @@
 
 nextflow.enable.dsl = 2
 
-include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] )
+include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf'
 
 workflow test_hisat2_extractsplicesites {
     gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true)
diff --git a/tests/modules/hisat2/extractsplicesites/nextflow.config b/tests/modules/hisat2/extractsplicesites/nextflow.config
new file mode 100644
index 00000000..8730f1c4
--- /dev/null
+++ b/tests/modules/hisat2/extractsplicesites/nextflow.config
@@ -0,0 +1,5 @@
+process {
+
+    publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
+
+}
diff --git a/tests/modules/hisat2/extractsplicesites/test.yml
b/tests/modules/hisat2/extractsplicesites/test.yml index a3e29346..a528199c 100644 --- a/tests/modules/hisat2/extractsplicesites/test.yml +++ b/tests/modules/hisat2/extractsplicesites/test.yml @@ -1,5 +1,5 @@ - name: hisat2 extractsplicesites test_hisat2_extractsplicesites - command: nextflow run tests/modules/hisat2/extractsplicesites -entry test_hisat2_extractsplicesites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/extractsplicesites -entry test_hisat2_extractsplicesites -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/extractsplicesites/nextflow.config tags: - hisat2 - hisat2/extractsplicesites diff --git a/tests/modules/hmmcopy/gccounter/main.nf b/tests/modules/hmmcopy/gccounter/main.nf index 30846ca9..05728bf5 100644 --- a/tests/modules/hmmcopy/gccounter/main.nf +++ b/tests/modules/hmmcopy/gccounter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMCOPY_GCCOUNTER } from '../../../../modules/hmmcopy/gccounter/main.nf' addParams( options: [:] ) +include { HMMCOPY_GCCOUNTER } from '../../../../modules/hmmcopy/gccounter/main.nf' workflow test_hmmcopy_gccounter { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/hmmcopy/gccounter/nextflow.config b/tests/modules/hmmcopy/gccounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmcopy/gccounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmcopy/gccounter/test.yml b/tests/modules/hmmcopy/gccounter/test.yml index edcd6b92..1cd20273 100644 --- a/tests/modules/hmmcopy/gccounter/test.yml +++ b/tests/modules/hmmcopy/gccounter/test.yml @@ -1,5 +1,5 @@ - name: hmmcopy gccounter test_hmmcopy_gccounter - command: nextflow run tests/modules/hmmcopy/gccounter -entry test_hmmcopy_gccounter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hmmcopy/gccounter -entry test_hmmcopy_gccounter -c ./tests/config/nextflow.config -c ./tests/modules/hmmcopy/gccounter/nextflow.config tags: - hmmcopy - hmmcopy/gccounter diff --git a/tests/modules/hmmcopy/readcounter/main.nf b/tests/modules/hmmcopy/readcounter/main.nf index 9025f98e..21737aab 100644 --- a/tests/modules/hmmcopy/readcounter/main.nf +++ b/tests/modules/hmmcopy/readcounter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMCOPY_READCOUNTER } from '../../../../modules/hmmcopy/readcounter/main.nf' addParams( options: [:] ) +include { HMMCOPY_READCOUNTER } from '../../../../modules/hmmcopy/readcounter/main.nf' workflow test_hmmcopy_readcounter { diff --git a/tests/modules/hmmcopy/readcounter/nextflow.config b/tests/modules/hmmcopy/readcounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmcopy/readcounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmcopy/readcounter/test.yml b/tests/modules/hmmcopy/readcounter/test.yml index 6c00ee08..a7e84f35 100644 --- a/tests/modules/hmmcopy/readcounter/test.yml +++ b/tests/modules/hmmcopy/readcounter/test.yml @@ -1,5 +1,5 @@ - name: hmmcopy readcounter test_hmmcopy_readcounter - command: nextflow run tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c ./tests/config/nextflow.config -c ./tests/modules/hmmcopy/readcounter/nextflow.config tags: - hmmcopy - hmmcopy/readcounter diff --git a/tests/modules/hmmer/hmmalign/main.nf b/tests/modules/hmmer/hmmalign/main.nf index 55194dc6..3bf6d452 100644 --- a/tests/modules/hmmer/hmmalign/main.nf +++ b/tests/modules/hmmer/hmmalign/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMER_HMMALIGN } from '../../../../modules/hmmer/hmmalign/main.nf' addParams( options: [:] ) +include { HMMER_HMMALIGN } from '../../../../modules/hmmer/hmmalign/main.nf' workflow test_hmmer_hmmalign { diff --git a/tests/modules/hmmer/hmmalign/nextflow.config b/tests/modules/hmmer/hmmalign/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmer/hmmalign/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmer/hmmalign/test.yml b/tests/modules/hmmer/hmmalign/test.yml index 4afb34ca..2e5ccfaf 100644 --- a/tests/modules/hmmer/hmmalign/test.yml +++ b/tests/modules/hmmer/hmmalign/test.yml @@ -1,5 +1,5 @@ - name: hmmer hmmalign test_hmmer_hmmalign - command: nextflow run tests/modules/hmmer/hmmalign -entry test_hmmer_hmmalign -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hmmer/hmmalign -entry test_hmmer_hmmalign -c ./tests/config/nextflow.config -c ./tests/modules/hmmer/hmmalign/nextflow.config tags: - hmmer - hmmer/hmmalign diff --git a/tests/modules/homer/annotatepeaks/main.nf b/tests/modules/homer/annotatepeaks/main.nf index b146c857..ab8f6f8f 100644 --- a/tests/modules/homer/annotatepeaks/main.nf +++ b/tests/modules/homer/annotatepeaks/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HOMER_ANNOTATEPEAKS } from '../../../../modules/homer/annotatepeaks/main.nf' addParams( options: [:] ) +include { HOMER_ANNOTATEPEAKS } from '../../../../modules/homer/annotatepeaks/main.nf' workflow test_homer_annotatepeaks { input = [ [ id:'test'], diff --git a/tests/modules/homer/annotatepeaks/nextflow.config b/tests/modules/homer/annotatepeaks/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/homer/annotatepeaks/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/homer/annotatepeaks/test.yml b/tests/modules/homer/annotatepeaks/test.yml index fed0f82e..52fd99a3 100644 --- a/tests/modules/homer/annotatepeaks/test.yml +++ b/tests/modules/homer/annotatepeaks/test.yml @@ -1,5 +1,5 @@ - name: homer annotatepeaks test_homer_annotatepeaks - command: nextflow run tests/modules/homer/annotatepeaks -entry test_homer_annotatepeaks -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/annotatepeaks -entry test_homer_annotatepeaks -c ./tests/config/nextflow.config -c ./tests/modules/homer/annotatepeaks/nextflow.config tags: - homer - homer/annotatepeaks diff --git a/tests/modules/homer/findpeaks/main.nf b/tests/modules/homer/findpeaks/main.nf index 06d44bdf..0e7e8ed6 100644 --- a/tests/modules/homer/findpeaks/main.nf +++ b/tests/modules/homer/findpeaks/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) -include { HOMER_FINDPEAKS } from 
'../../../../modules/homer/findpeaks/main.nf' addParams( options: [args: '-style factor'] ) +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' +include { HOMER_FINDPEAKS } from '../../../../modules/homer/findpeaks/main.nf' workflow test_homer_findpeaks { input = [[id:'test'], diff --git a/tests/modules/homer/findpeaks/nextflow.config b/tests/modules/homer/findpeaks/nextflow.config new file mode 100644 index 00000000..9a921a3c --- /dev/null +++ b/tests/modules/homer/findpeaks/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + + withName: HOMER_FINDPEAKS { + ext.args = '-style factor' + } + +} diff --git a/tests/modules/homer/findpeaks/test.yml b/tests/modules/homer/findpeaks/test.yml index b0b1a0df..75e94529 100644 --- a/tests/modules/homer/findpeaks/test.yml +++ b/tests/modules/homer/findpeaks/test.yml @@ -1,5 +1,5 @@ - name: homer findpeaks - command: nextflow run ./tests/modules/homer/findpeaks -entry test_homer_findpeaks -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/findpeaks -entry test_homer_findpeaks -c ./tests/config/nextflow.config -c ./tests/modules/homer/findpeaks/nextflow.config tags: - homer - homer/findpeaks diff --git a/tests/modules/homer/maketagdirectory/main.nf b/tests/modules/homer/maketagdirectory/main.nf index 897aac1f..766aff0d 100644 --- a/tests/modules/homer/maketagdirectory/main.nf +++ b/tests/modules/homer/maketagdirectory/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' workflow test_homer_maketagdirectory { input = [[id:'test'], diff --git a/tests/modules/homer/maketagdirectory/nextflow.config b/tests/modules/homer/maketagdirectory/nextflow.config new file mode 100644 index 00000000..81587d69 --- /dev/null +++ b/tests/modules/homer/maketagdirectory/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + +} diff --git a/tests/modules/homer/maketagdirectory/test.yml b/tests/modules/homer/maketagdirectory/test.yml index 80112c0b..746c6ef6 100644 --- a/tests/modules/homer/maketagdirectory/test.yml +++ b/tests/modules/homer/maketagdirectory/test.yml @@ -1,5 +1,5 @@ - name: homer maketagdirectory - command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config tags: - homer - homer/maketagdirectory @@ -16,7 +16,7 @@ md5sum: e5aa2b9843ca9c04ace297280aed6af4 - name: homer meta maketagdirectory - command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_meta_maketagdirectory -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_meta_maketagdirectory -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config tags: - homer - homer/maketagdirectory diff --git 
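The withName: blocks above populate task.ext for the selected process, which is what replaces the old addParams( options: [args: ...] ) on the include lines; a module that reads task.ext.args picks the value up at run time, along the lines of the sketch below (the command is a placeholder, not the real HOMER module script):

    process HOMER_FINDPEAKS {
        input:
        tuple val(meta), path(tagdir)

        output:
        tuple val(meta), path("*.peaks.txt"), emit: txt

        script:
        def args = task.ext.args ?: ''   // '-style factor' once the config above is loaded
        """
        findPeaks $tagdir $args -o ${meta.id}.peaks.txt
        """
    }

The same plumbing applies to every other withName: ext.args entry in this series.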
a/tests/modules/homer/makeucscfile/main.nf b/tests/modules/homer/makeucscfile/main.nf index 5ed75959..986c9c14 100644 --- a/tests/modules/homer/makeucscfile/main.nf +++ b/tests/modules/homer/makeucscfile/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) -include { HOMER_MAKEUCSCFILE } from '../../../../modules/homer/makeucscfile/main.nf' addParams( options: [:] ) +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' +include { HOMER_MAKEUCSCFILE } from '../../../../modules/homer/makeucscfile/main.nf' workflow test_homer_makeucscfile { input = [[id:'test'], diff --git a/tests/modules/homer/makeucscfile/nextflow.config b/tests/modules/homer/makeucscfile/nextflow.config new file mode 100644 index 00000000..81587d69 --- /dev/null +++ b/tests/modules/homer/makeucscfile/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + +} diff --git a/tests/modules/homer/makeucscfile/test.yml b/tests/modules/homer/makeucscfile/test.yml index 4d337f41..cf3d1b4d 100644 --- a/tests/modules/homer/makeucscfile/test.yml +++ b/tests/modules/homer/makeucscfile/test.yml @@ -1,5 +1,5 @@ - name: homer makeucscfile - command: nextflow run ./tests/modules/homer/makeucscfile -entry test_homer_makeucscfile -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/makeucscfile -entry test_homer_makeucscfile -c ./tests/config/nextflow.config -c ./tests/modules/homer/makeucscfile/nextflow.config tags: - homer - homer/makeucscfile diff --git a/tests/modules/idr/main.nf b/tests/modules/idr/main.nf index aa141a57..ed3bf289 100644 --- a/tests/modules/idr/main.nf +++ b/tests/modules/idr/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IDR } from '../../../modules/idr/main.nf' addParams( options: [:] ) +include { IDR } from '../../../modules/idr/main.nf' workflow test_idr_narrowpeak { diff --git a/tests/modules/idr/nextflow.config b/tests/modules/idr/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/idr/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/idr/test.yml b/tests/modules/idr/test.yml index 35ee4bc9..9d5ef2a7 100644 --- a/tests/modules/idr/test.yml +++ b/tests/modules/idr/test.yml @@ -1,5 +1,5 @@ - name: idr test_idr_narrowpeak - command: nextflow run tests/modules/idr -entry test_idr_narrowpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/idr -entry test_idr_narrowpeak -c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: @@ -11,7 +11,7 @@ md5sum: 6443507ac66b9d3b64bc56b78328083e - name: idr test_idr_broadpeak - command: nextflow run tests/modules/idr -entry test_idr_broadpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/idr -entry test_idr_broadpeak -c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: @@ -23,7 +23,7 @@ md5sum: e6917133112b5cec135c182ffac19237 - name: idr test_idr_noprefix - command: nextflow run tests/modules/idr -entry test_idr_noprefix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/idr -entry test_idr_noprefix 
-c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: diff --git a/tests/modules/imputeme/vcftoprs/main.nf b/tests/modules/imputeme/vcftoprs/main.nf index ff59ca5e..dccc06e0 100644 --- a/tests/modules/imputeme/vcftoprs/main.nf +++ b/tests/modules/imputeme/vcftoprs/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IMPUTEME_VCFTOPRS } from '../../../../modules/imputeme/vcftoprs/main.nf' addParams( options: [:] ) +include { IMPUTEME_VCFTOPRS } from '../../../../modules/imputeme/vcftoprs/main.nf' workflow test_imputeme_vcftoprs { diff --git a/tests/modules/imputeme/vcftoprs/nextflow.config b/tests/modules/imputeme/vcftoprs/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/imputeme/vcftoprs/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/imputeme/vcftoprs/test.yml b/tests/modules/imputeme/vcftoprs/test.yml index efb73769..e5152a03 100644 --- a/tests/modules/imputeme/vcftoprs/test.yml +++ b/tests/modules/imputeme/vcftoprs/test.yml @@ -1,5 +1,5 @@ - name: imputeme vcftoprs test_imputeme_vcftoprs - command: nextflow run tests/modules/imputeme/vcftoprs -entry test_imputeme_vcftoprs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/imputeme/vcftoprs -entry test_imputeme_vcftoprs -c ./tests/config/nextflow.config -c ./tests/modules/imputeme/vcftoprs/nextflow.config tags: - imputeme - imputeme/vcftoprs diff --git a/tests/modules/iqtree/main.nf b/tests/modules/iqtree/main.nf index 977d7c0a..2d73bd52 100644 --- a/tests/modules/iqtree/main.nf +++ b/tests/modules/iqtree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IQTREE } from '../../../modules/iqtree/main.nf' addParams( options: [:] ) +include { IQTREE } from '../../../modules/iqtree/main.nf' workflow test_iqtree { diff --git a/tests/modules/iqtree/nextflow.config b/tests/modules/iqtree/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/iqtree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/iqtree/test.yml b/tests/modules/iqtree/test.yml index e40656a2..06de90d9 100644 --- a/tests/modules/iqtree/test.yml +++ b/tests/modules/iqtree/test.yml @@ -1,5 +1,5 @@ - name: iqtree test workflow - command: nextflow run ./tests/modules/iqtree -entry test_iqtree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/iqtree -entry test_iqtree -c ./tests/config/nextflow.config -c ./tests/modules/iqtree/nextflow.config tags: - iqtree files: diff --git a/tests/modules/ismapper/main.nf b/tests/modules/ismapper/main.nf index b28344dc..abb180f7 100644 --- a/tests/modules/ismapper/main.nf +++ b/tests/modules/ismapper/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ISMAPPER } from '../../../modules/ismapper/main.nf' addParams( options: [:] ) +include { ISMAPPER } from '../../../modules/ismapper/main.nf' workflow test_ismapper { diff --git a/tests/modules/ismapper/nextflow.config b/tests/modules/ismapper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ismapper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ismapper/test.yml 
b/tests/modules/ismapper/test.yml index 0574b855..b4f64448 100644 --- a/tests/modules/ismapper/test.yml +++ b/tests/modules/ismapper/test.yml @@ -1,5 +1,5 @@ - name: ismapper test_ismapper - command: nextflow run tests/modules/ismapper -entry test_ismapper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ismapper -entry test_ismapper -c ./tests/config/nextflow.config -c ./tests/modules/ismapper/nextflow.config tags: - ismapper files: diff --git a/tests/modules/isoseq3/cluster/main.nf b/tests/modules/isoseq3/cluster/main.nf index 90a24c11..958b03a6 100644 --- a/tests/modules/isoseq3/cluster/main.nf +++ b/tests/modules/isoseq3/cluster/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ISOSEQ3_CLUSTER } from '../../../../modules/isoseq3/cluster/main.nf' addParams( options: [args: '--singletons --use-qvs --verbose'] ) +include { ISOSEQ3_CLUSTER } from '../../../../modules/isoseq3/cluster/main.nf' workflow test_isoseq3_cluster { diff --git a/tests/modules/isoseq3/cluster/nextflow.config b/tests/modules/isoseq3/cluster/nextflow.config new file mode 100644 index 00000000..8bfeaebd --- /dev/null +++ b/tests/modules/isoseq3/cluster/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ISOSEQ3_CLUSTER { + ext.args = '--singletons --use-qvs --verbose' + } + +} diff --git a/tests/modules/isoseq3/cluster/test.yml b/tests/modules/isoseq3/cluster/test.yml index 58b20ae2..b1f12df7 100644 --- a/tests/modules/isoseq3/cluster/test.yml +++ b/tests/modules/isoseq3/cluster/test.yml @@ -1,5 +1,5 @@ - name: isoseq3 cluster test_isoseq3_cluster - command: nextflow run tests/modules/isoseq3/cluster -entry test_isoseq3_cluster -c tests/config/nextflow.config + command: nextflow run ./tests/modules/isoseq3/cluster -entry test_isoseq3_cluster -c ./tests/config/nextflow.config -c ./tests/modules/isoseq3/cluster/nextflow.config tags: - isoseq3 - isoseq3/cluster diff --git a/tests/modules/isoseq3/refine/main.nf b/tests/modules/isoseq3/refine/main.nf index 13736604..45dd1560 100644 --- a/tests/modules/isoseq3/refine/main.nf +++ b/tests/modules/isoseq3/refine/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ISOSEQ3_REFINE } from '../../../../modules/isoseq3/refine/main' addParams( options: [suffix:'.refine'] ) +include { ISOSEQ3_REFINE } from '../../../../modules/isoseq3/refine/main' workflow test_isoseq3_refine { diff --git a/tests/modules/isoseq3/refine/nextflow.config b/tests/modules/isoseq3/refine/nextflow.config new file mode 100644 index 00000000..88f1bdc4 --- /dev/null +++ b/tests/modules/isoseq3/refine/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ISOSEQ3_REFINE { + ext.suffix = '.refine' + } + +} diff --git a/tests/modules/isoseq3/refine/test.yml b/tests/modules/isoseq3/refine/test.yml index 2e7782d3..f2c63fda 100644 --- a/tests/modules/isoseq3/refine/test.yml +++ b/tests/modules/isoseq3/refine/test.yml @@ -1,5 +1,5 @@ - name: isoseq3 refine test_isoseq3_refine - command: nextflow run tests/modules/isoseq3/refine -entry test_isoseq3_refine -c tests/config/nextflow.config + command: nextflow run ./tests/modules/isoseq3/refine -entry test_isoseq3_refine -c ./tests/config/nextflow.config -c ./tests/modules/isoseq3/refine/nextflow.config tags: - isoseq3 - isoseq3/refine diff --git a/tests/modules/ivar/consensus/main.nf b/tests/modules/ivar/consensus/main.nf 
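Alongside ext.args, some of the configs above set ext.suffix (isoseq3/refine gets '.refine'); a module that folds task.ext.suffix into its output prefix then renames its files without any change to the test workflow. An illustrative, self-contained process (EXAMPLE_TOOL and its command are invented for the sketch, not part of these modules):

    process EXAMPLE_TOOL {
        input:
        tuple val(meta), path(reads)

        output:
        tuple val(meta), path("*.txt"), emit: stats

        script:
        def suffix = task.ext.suffix ?: ''           // '.refine' with a config like the one above
        def prefix = "${meta.id}${suffix}"
        """
        wc -l $reads > ${prefix}.txt
        """
    }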
index 5e0457b5..d0807984 100644 --- a/tests/modules/ivar/consensus/main.nf +++ b/tests/modules/ivar/consensus/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 params.save_mpileup = true -include { IVAR_CONSENSUS } from '../../../../modules/ivar/consensus/main.nf' addParams( [ options: [args2: '-aa -A -d 0 -Q 0'] ] ) +include { IVAR_CONSENSUS } from '../../../../modules/ivar/consensus/main.nf' workflow test_ivar_consensus { input = [ [ id:'test'], diff --git a/tests/modules/ivar/consensus/nextflow.config b/tests/modules/ivar/consensus/nextflow.config new file mode 100644 index 00000000..7407619a --- /dev/null +++ b/tests/modules/ivar/consensus/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: IVAR_CONSENSUS { + ext.args2 = '-aa -A -d 0 -Q 0' + } + +} diff --git a/tests/modules/ivar/consensus/test.yml b/tests/modules/ivar/consensus/test.yml index 071fdc98..caaa640f 100644 --- a/tests/modules/ivar/consensus/test.yml +++ b/tests/modules/ivar/consensus/test.yml @@ -1,5 +1,5 @@ - name: ivar consensus - command: nextflow run ./tests/modules/ivar/consensus -entry test_ivar_consensus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/consensus -entry test_ivar_consensus -c ./tests/config/nextflow.config -c ./tests/modules/ivar/consensus/nextflow.config tags: - ivar - ivar/consensus diff --git a/tests/modules/ivar/trim/main.nf b/tests/modules/ivar/trim/main.nf index 05b390b0..15d0e739 100644 --- a/tests/modules/ivar/trim/main.nf +++ b/tests/modules/ivar/trim/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IVAR_TRIM } from '../../../../modules/ivar/trim/main.nf' addParams([:]) +include { IVAR_TRIM } from '../../../../modules/ivar/trim/main.nf' workflow test_ivar_trim { input = [ [ id:'test'], diff --git a/tests/modules/ivar/trim/nextflow.config b/tests/modules/ivar/trim/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ivar/trim/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ivar/trim/test.yml b/tests/modules/ivar/trim/test.yml index f2f46676..0be18ba8 100644 --- a/tests/modules/ivar/trim/test.yml +++ b/tests/modules/ivar/trim/test.yml @@ -1,5 +1,5 @@ - name: ivar trim - command: nextflow run ./tests/modules/ivar/trim -entry test_ivar_trim -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/trim -entry test_ivar_trim -c ./tests/config/nextflow.config -c ./tests/modules/ivar/trim/nextflow.config tags: - ivar - ivar/trim diff --git a/tests/modules/ivar/variants/main.nf b/tests/modules/ivar/variants/main.nf index 5358e785..f603b5e5 100644 --- a/tests/modules/ivar/variants/main.nf +++ b/tests/modules/ivar/variants/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IVAR_VARIANTS } from '../../../../modules/ivar/variants/main.nf' addParams([:]) +include { IVAR_VARIANTS } from '../../../../modules/ivar/variants/main.nf' workflow test_ivar_variants_no_gff_no_mpileup { params.gff = false diff --git a/tests/modules/ivar/variants/nextflow.config b/tests/modules/ivar/variants/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ivar/variants/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
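ivar/consensus is the one case above that also needs ext.args2: when a module's command is a pipeline of two tools, the pattern here is one ext hook per stage. A hedged sketch of such a process (the exact flags and the pipe are illustrative; only the args/args2 plumbing is the point):

    process EXAMPLE_CONSENSUS {
        input:
        tuple val(meta), path(bam)
        path fasta

        output:
        tuple val(meta), path("*.fa"), emit: fasta

        script:
        def args  = task.ext.args  ?: ''    // options for the consensus caller
        def args2 = task.ext.args2 ?: ''    // '-aa -A -d 0 -Q 0' would feed the pileup stage here
        """
        samtools mpileup $args2 --reference $fasta $bam \\
            | ivar consensus $args -p ${meta.id}
        """
    }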
a/tests/modules/ivar/variants/test.yml b/tests/modules/ivar/variants/test.yml index a8be12a8..00e6e2c0 100644 --- a/tests/modules/ivar/variants/test.yml +++ b/tests/modules/ivar/variants/test.yml @@ -1,5 +1,5 @@ - name: ivar variants no gff no mpileup - command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_no_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_no_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants @@ -8,7 +8,7 @@ md5sum: 728f1430f2402861396d9953465ac706 - name: ivar variants no gff with mpileup - command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_with_mpileup -c tests/config/nextflow.config --save_mpileup + command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_with_mpileup -c ./tests/config/nextflow.config --save_mpileup -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants @@ -19,7 +19,7 @@ md5sum: 56c4cd5a4ecb7d6364878818f46ae256 - name: ivar variants with gff with mpileup - command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_with_gff_with_mpileup -c tests/config/nextflow.config --gff tests/data/gff/sarscov2/MN908947.3.gff3 --save_mpileup + command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_with_gff_with_mpileup -c ./tests/config/nextflow.config --gff tests/data/gff/sarscov2/MN908947.3.gff3 --save_mpileup -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants diff --git a/tests/modules/jupyternotebook/main.nf b/tests/modules/jupyternotebook/main.nf index c1da7e11..1db9d812 100644 --- a/tests/modules/jupyternotebook/main.nf +++ b/tests/modules/jupyternotebook/main.nf @@ -2,15 +2,9 @@ nextflow.enable.dsl = 2 -include { JUPYTERNOTEBOOK } from '../../../modules/jupyternotebook/main.nf' addParams( - parametrize: false, options: [:] -) -include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE } from '../../../modules/jupyternotebook/main.nf' addParams( - options: [:] -) -include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB } from '../../../modules/jupyternotebook/main.nf' addParams( - options: [:] -) +include { JUPYTERNOTEBOOK } from '../../../modules/jupyternotebook/main.nf' +include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE } from '../../../modules/jupyternotebook/main.nf' +include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB } from '../../../modules/jupyternotebook/main.nf' workflow test_jupyternotebook { diff --git a/tests/modules/jupyternotebook/nextflow.config b/tests/modules/jupyternotebook/nextflow.config new file mode 100644 index 00000000..6066b2b8 --- /dev/null +++ b/tests/modules/jupyternotebook/nextflow.config @@ -0,0 +1,19 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: JUPYTERNOTEBOOK { + ext = ['parametrize': false] + } + + // this should be the default options, but need to work around + // https://github.com/nextflow-io/nextflow/issues/2422 + withName: JUPYTERNOTEBOOK_PARAMETRIZE { + ext = ['parametrize': true] + } + + withName: JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB { + ext = ['parametrize': true] + } + +} diff --git a/tests/modules/jupyternotebook/test.yml b/tests/modules/jupyternotebook/test.yml index dd4f1175..31fdfdbb 100644 --- a/tests/modules/jupyternotebook/test.yml +++ b/tests/modules/jupyternotebook/test.yml @@ 
-1,5 +1,5 @@ - name: jupyternotebook test_jupyternotebook - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook -c ./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: @@ -8,7 +8,7 @@ - "n_iter = 10" - name: jupyternotebook test_jupyternotebook_parametrize - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize -c ./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: @@ -19,7 +19,7 @@ - "n_iter = 12" - name: jupyternotebook test_jupyternotebook_parametrize_ipynb - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize_ipynb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize_ipynb -c ./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: diff --git a/tests/modules/kallisto/index/main.nf b/tests/modules/kallisto/index/main.nf index 7c6078f8..8ecd6d52 100644 --- a/tests/modules/kallisto/index/main.nf +++ b/tests/modules/kallisto/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTO_INDEX } from '../../../../modules/kallisto/index/main.nf' addParams( options: [:] ) +include { KALLISTO_INDEX } from '../../../../modules/kallisto/index/main.nf' workflow test_kallisto_index { diff --git a/tests/modules/kallisto/index/nextflow.config b/tests/modules/kallisto/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kallisto/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kallisto/index/test.yml b/tests/modules/kallisto/index/test.yml index b9dd23ad..90a06325 100644 --- a/tests/modules/kallisto/index/test.yml +++ b/tests/modules/kallisto/index/test.yml @@ -1,5 +1,5 @@ - name: kallisto index test_kallisto_index - command: nextflow run tests/modules/kallisto/index -entry test_kallisto_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallisto/index -entry test_kallisto_index -c ./tests/config/nextflow.config -c ./tests/modules/kallisto/index/nextflow.config tags: - kallisto - kallisto/index diff --git a/tests/modules/kallistobustools/count/main.nf b/tests/modules/kallistobustools/count/main.nf index 9172ddfc..6e6be03d 100644 --- a/tests/modules/kallistobustools/count/main.nf +++ b/tests/modules/kallistobustools/count/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' addParams( options: [args:"--cellranger -m 1"] ) +include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' workflow test_kallistobustools_count { diff --git a/tests/modules/kallistobustools/count/nextflow.config b/tests/modules/kallistobustools/count/nextflow.config new file mode 100644 index 00000000..eb4e20bd --- /dev/null +++ b/tests/modules/kallistobustools/count/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
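One wrinkle in the notebook tests above: the whole ext map is assigned (ext = ['parametrize': ...]) instead of a single ext.<key>, and the in-file comment points at nextflow-io/nextflow#2422 for why that workaround is needed. However it is written, the process reads the flag back through task.ext like any other entry; a hypothetical guard, not the module's actual code, with the default value assumed:

    def parametrize = task.ext.parametrize != null ? task.ext.parametrize : true   // default is an assumption here
    if (parametrize) {
        // render the params map into the notebook before executing it (module-specific, omitted)
    }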
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: KALLISTOBUSTOOLS_COUNT { + ext.args = '--cellranger -m 1' + } + +} diff --git a/tests/modules/kallistobustools/count/test.yml b/tests/modules/kallistobustools/count/test.yml index 766d5b57..664e9fa6 100644 --- a/tests/modules/kallistobustools/count/test.yml +++ b/tests/modules/kallistobustools/count/test.yml @@ -1,5 +1,5 @@ - name: kallistobustools count test_kallistobustools_count - command: nextflow run tests/modules/kallistobustools/count -entry test_kallistobustools_count -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/count -entry test_kallistobustools_count -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/count/nextflow.config tags: - kallistobustools/count - kallistobustools diff --git a/tests/modules/kallistobustools/ref/main.nf b/tests/modules/kallistobustools/ref/main.nf index 31b36d0d..09ea68ea 100644 --- a/tests/modules/kallistobustools/ref/main.nf +++ b/tests/modules/kallistobustools/ref/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTOBUSTOOLS_REF } from '../../../../modules/kallistobustools/ref/main.nf' addParams( options: [:] ) +include { KALLISTOBUSTOOLS_REF } from '../../../../modules/kallistobustools/ref/main.nf' workflow test_kallistobustools_ref_standard { diff --git a/tests/modules/kallistobustools/ref/nextflow.config b/tests/modules/kallistobustools/ref/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kallistobustools/ref/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kallistobustools/ref/test.yml b/tests/modules/kallistobustools/ref/test.yml index 54954085..1e8fd6c4 100644 --- a/tests/modules/kallistobustools/ref/test.yml +++ b/tests/modules/kallistobustools/ref/test.yml @@ -1,5 +1,5 @@ - name: kallistobustools ref test_kallistobustools_ref_standard - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_standard -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_standard -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools @@ -9,7 +9,7 @@ - path: output/kallistobustools/t2g.txt - name: kallistobustools ref test_kallistobustools_ref_lamanno - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_lamanno -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_lamanno -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools @@ -22,7 +22,7 @@ - path: output/kallistobustools/t2g.txt - name: kallistobustools ref test_kallistobustools_ref_nucleus - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_nucleus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_nucleus -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools diff --git a/tests/modules/khmer/normalizebymedian/main.nf b/tests/modules/khmer/normalizebymedian/main.nf index 3a3b348c..c439c40f 
100644 --- a/tests/modules/khmer/normalizebymedian/main.nf +++ b/tests/modules/khmer/normalizebymedian/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [:] ) -include { KHMER_NORMALIZEBYMEDIAN } from '../../../../modules/khmer/normalizebymedian/main.nf' addParams( options: [:] ) -include { KHMER_NORMALIZEBYMEDIAN as KHMER_NORMALIZEBYMEDIAN_ARGS } from '../../../../modules/khmer/normalizebymedian/main.nf' addParams( options: [args: '-C 20 -k 32'] ) +include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' +include { KHMER_NORMALIZEBYMEDIAN } from '../../../../modules/khmer/normalizebymedian/main.nf' +include { KHMER_NORMALIZEBYMEDIAN as KHMER_NORMALIZEBYMEDIAN_ARGS } from '../../../../modules/khmer/normalizebymedian/main.nf' workflow test_khmer_normalizebymedian_only_pe { diff --git a/tests/modules/khmer/normalizebymedian/nextflow.config b/tests/modules/khmer/normalizebymedian/nextflow.config new file mode 100644 index 00000000..279a972a --- /dev/null +++ b/tests/modules/khmer/normalizebymedian/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: KHMER_NORMALIZEBYMEDIAN_ARGS { + ext.args = '-C 20 -k 32' + } + +} diff --git a/tests/modules/khmer/normalizebymedian/test.yml b/tests/modules/khmer/normalizebymedian/test.yml index a914a8ef..0e61588f 100644 --- a/tests/modules/khmer/normalizebymedian/test.yml +++ b/tests/modules/khmer/normalizebymedian/test.yml @@ -1,6 +1,6 @@ # nf-core modules create-test-yml khmer/normalizebymedian - name: khmer normalizebymedian only pe reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_pe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_pe -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -10,7 +10,7 @@ #md5sum: 75e05f2e80cf4bd0b534d4b73f7c059c - name: khmer normalizebymedian only se reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_se -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_se -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -18,7 +18,7 @@ - path: output/khmer/only_se.fastq.gz - name: khmer normalizebymedian mixed reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_mixed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_mixed -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -26,7 +26,7 @@ - path: output/khmer/mixed.fastq.gz - name: khmer normalizebymedian multiple pe reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_multiple_pe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_multiple_pe -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - 
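A detail worth noting from the khmer test above (and the JUPYTERNOTEBOOK_* includes earlier): when a module is included twice under different names, the withName: selector matches the alias, so only that copy receives the extra arguments. Condensed from the files above, nothing new beyond the juxtaposition:

    // main.nf
    include { KHMER_NORMALIZEBYMEDIAN as KHMER_NORMALIZEBYMEDIAN_ARGS } from '../../../../modules/khmer/normalizebymedian/main.nf'

    // nextflow.config
    process {
        withName: KHMER_NORMALIZEBYMEDIAN_ARGS {
            ext.args = '-C 20 -k 32'   // reaches the aliased copy only; the plain KHMER_NORMALIZEBYMEDIAN include keeps empty args
        }
    }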
khmer/normalizebymedian @@ -34,7 +34,7 @@ - path: output/khmer/multiple_pe.fastq.gz - name: khmer normalizebymedian args - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_args -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_args -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian diff --git a/tests/modules/kleborate/main.nf b/tests/modules/kleborate/main.nf index f846e642..bce31225 100644 --- a/tests/modules/kleborate/main.nf +++ b/tests/modules/kleborate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KLEBORATE } from '../../../modules/kleborate/main.nf' addParams( options: [:] ) +include { KLEBORATE } from '../../../modules/kleborate/main.nf' workflow test_kleborate { diff --git a/tests/modules/kleborate/nextflow.config b/tests/modules/kleborate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kleborate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kleborate/test.yml b/tests/modules/kleborate/test.yml index 1bee4708..c7b25778 100644 --- a/tests/modules/kleborate/test.yml +++ b/tests/modules/kleborate/test.yml @@ -1,5 +1,5 @@ - name: kleborate - command: nextflow run ./tests/modules/kleborate -entry test_kleborate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kleborate -entry test_kleborate -c ./tests/config/nextflow.config -c ./tests/modules/kleborate/nextflow.config tags: - kleborate files: diff --git a/tests/modules/kraken2/kraken2/main.nf b/tests/modules/kraken2/kraken2/main.nf index e5638ec5..12399e9e 100644 --- a/tests/modules/kraken2/kraken2/main.nf +++ b/tests/modules/kraken2/kraken2/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { KRAKEN2_KRAKEN2 } from '../../../../modules/kraken2/kraken2/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { KRAKEN2_KRAKEN2 } from '../../../../modules/kraken2/kraken2/main.nf' workflow test_kraken2_kraken2_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/kraken2/kraken2/nextflow.config b/tests/modules/kraken2/kraken2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kraken2/kraken2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kraken2/kraken2/test.yml b/tests/modules/kraken2/kraken2/test.yml index 688fb34c..1ec413bf 100644 --- a/tests/modules/kraken2/kraken2/test.yml +++ b/tests/modules/kraken2/kraken2/test.yml @@ -1,5 +1,5 @@ - name: kraken2 kraken2 single-end - command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_single_end -c ./tests/config/nextflow.config -c ./tests/modules/kraken2/kraken2/nextflow.config tags: - kraken2 - kraken2/kraken2 @@ -12,7 +12,7 @@ md5sum: 4227755fe40478b8d7dc8634b489761e - name: kraken2 kraken2 paired-end - command: nextflow run ./tests/modules/kraken2/kraken2 -entry 
test_kraken2_kraken2_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/kraken2/kraken2/nextflow.config tags: - kraken2 - kraken2/kraken2 diff --git a/tests/modules/krona/kronadb/main.nf b/tests/modules/krona/kronadb/main.nf new file mode 100644 index 00000000..ed955854 --- /dev/null +++ b/tests/modules/krona/kronadb/main.nf @@ -0,0 +1,9 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONA_KRONADB } from '../../../../modules/krona/kronadb/main.nf' + +workflow test_krona_kronadb { + KRONA_KRONADB ( ) +} diff --git a/tests/modules/krona/kronadb/nextflow.config b/tests/modules/krona/kronadb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/krona/kronadb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/krona/kronadb/test.yml b/tests/modules/krona/kronadb/test.yml new file mode 100644 index 00000000..1d61640f --- /dev/null +++ b/tests/modules/krona/kronadb/test.yml @@ -0,0 +1,7 @@ +- name: krona kronadb test_krona_kronadb + command: nextflow run ./tests/modules/krona/kronadb -entry test_krona_kronadb -c ./tests/config/nextflow.config -c ./tests/modules/krona/kronadb/nextflow.config + tags: + - krona + - krona/kronadb + files: + - path: output/krona/taxonomy/taxonomy.tab diff --git a/tests/modules/krona/ktimporttaxonomy/main.nf b/tests/modules/krona/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..a23e6fcb --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONA_KTIMPORTTAXONOMY } from '../../../../modules/krona/ktimporttaxonomy/main.nf' + +workflow test_krona_ktimporttaxonomy { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + ] + taxonomy = file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + + KRONA_KTIMPORTTAXONOMY ( input, taxonomy ) +} diff --git a/tests/modules/krona/ktimporttaxonomy/nextflow.config b/tests/modules/krona/ktimporttaxonomy/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/krona/ktimporttaxonomy/test.yml b/tests/modules/krona/ktimporttaxonomy/test.yml new file mode 100644 index 00000000..b7748980 --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/test.yml @@ -0,0 +1,9 @@ +- name: krona ktimporttaxonomy test_krona_ktimporttaxonomy + command: nextflow run ./tests/modules/krona/ktimporttaxonomy -entry test_krona_ktimporttaxonomy -c ./tests/config/nextflow.config -c ./tests/modules/krona/ktimporttaxonomy/nextflow.config + tags: + - krona/ktimporttaxonomy + - krona + files: + - path: output/krona/taxonomy.krona.html + contains: + - "DOCTYPE html PUBLIC" diff --git a/tests/modules/kronatools/kronadb/main.nf b/tests/modules/kronatools/kronadb/main.nf deleted file mode 100644 index 90b6e30c..00000000 --- a/tests/modules/kronatools/kronadb/main.nf +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { KRONATOOLS_KRONADB } from 
'../../../../modules/kronatools/kronadb/main.nf' addParams( options: [:] ) - -workflow test_kronatools_kronadb { - KRONATOOLS_KRONADB ( ) -} diff --git a/tests/modules/kronatools/kronadb/test.yml b/tests/modules/kronatools/kronadb/test.yml deleted file mode 100644 index 3f346a9d..00000000 --- a/tests/modules/kronatools/kronadb/test.yml +++ /dev/null @@ -1,7 +0,0 @@ -- name: kronatools kronadb test_kronatools_kronadb - command: nextflow run tests/modules/kronatools/kronadb -entry test_kronatools_kronadb -c tests/config/nextflow.config - tags: - - kronatools - - kronatools/kronadb - files: - - path: output/kronatools/taxonomy/taxonomy.tab diff --git a/tests/modules/kronatools/ktimporttaxonomy/main.nf b/tests/modules/kronatools/ktimporttaxonomy/main.nf deleted file mode 100644 index d7b08a2f..00000000 --- a/tests/modules/kronatools/ktimporttaxonomy/main.nf +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { KRONATOOLS_KTIMPORTTAXONOMY } from '../../../../modules/kronatools/ktimporttaxonomy/main.nf' addParams( options: [:] ) - -workflow test_kronatools_ktimporttaxonomy { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['generic']['txt']['hello'], checkIfExists: true) ] - - taxonomy = [ file(params.test_data['generic']['txt']['hello'] , checkIfExists: true) ] - - KRONATOOLS_KTIMPORTTAXONOMY ( input, taxonomy ) -} diff --git a/tests/modules/kronatools/ktimporttaxonomy/test.yml b/tests/modules/kronatools/ktimporttaxonomy/test.yml deleted file mode 100644 index 15882b2e..00000000 --- a/tests/modules/kronatools/ktimporttaxonomy/test.yml +++ /dev/null @@ -1,9 +0,0 @@ -- name: kronatools ktimporttaxonomy test_kronatools_ktimporttaxonomy - command: nextflow run tests/modules/kronatools/ktimporttaxonomy -entry test_kronatools_ktimporttaxonomy -c tests/config/nextflow.config - tags: - - kronatools/ktimporttaxonomy - - kronatools - files: - - path: output/kronatools/taxonomy.krona.html - contains: - - "DOCTYPE html PUBLIC" diff --git a/tests/modules/last/dotplot/main.nf b/tests/modules/last/dotplot/main.nf index b92ed270..3353821d 100644 --- a/tests/modules/last/dotplot/main.nf +++ b/tests/modules/last/dotplot/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_DOTPLOT } from '../../../../modules/last/dotplot/main.nf' addParams( options: [:] ) +include { LAST_DOTPLOT } from '../../../../modules/last/dotplot/main.nf' workflow test_last_dotplot { diff --git a/tests/modules/last/dotplot/nextflow.config b/tests/modules/last/dotplot/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/dotplot/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/dotplot/test.yml b/tests/modules/last/dotplot/test.yml index 177e377b..c2a9910f 100644 --- a/tests/modules/last/dotplot/test.yml +++ b/tests/modules/last/dotplot/test.yml @@ -1,5 +1,5 @@ - name: last dotplot test_last_dotplot - command: nextflow run tests/modules/last/dotplot -entry test_last_dotplot -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/dotplot -entry test_last_dotplot -c ./tests/config/nextflow.config -c ./tests/modules/last/dotplot/nextflow.config tags: - last/dotplot - last diff --git a/tests/modules/last/lastal/main.nf b/tests/modules/last/lastal/main.nf index 262c8f5f..95c2f917 100644 --- a/tests/modules/last/lastal/main.nf +++ 
b/tests/modules/last/lastal/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { LAST_LASTAL } from '../../../../modules/last/lastal/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { LAST_LASTAL } from '../../../../modules/last/lastal/main.nf' workflow test_last_lastal_with_dummy_param_file { diff --git a/tests/modules/last/lastal/nextflow.config b/tests/modules/last/lastal/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/lastal/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/lastal/test.yml b/tests/modules/last/lastal/test.yml index 48b0d223..f75e4ac5 100644 --- a/tests/modules/last/lastal/test.yml +++ b/tests/modules/last/lastal/test.yml @@ -1,5 +1,5 @@ - name: last lastal test_last_lastal_with_dummy_param_file - command: nextflow run tests/modules/last/lastal -entry test_last_lastal_with_dummy_param_file -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastal -entry test_last_lastal_with_dummy_param_file -c ./tests/config/nextflow.config -c ./tests/modules/last/lastal/nextflow.config tags: - last - last/lastal @@ -22,7 +22,7 @@ md5sum: b7c40f06b1309dc6f37849eeb86dfd22 - name: last lastal test_last_lastal_with_real_param_file - command: nextflow run tests/modules/last/lastal -entry test_last_lastal_with_real_param_file -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastal -entry test_last_lastal_with_real_param_file -c ./tests/config/nextflow.config -c ./tests/modules/last/lastal/nextflow.config tags: - last - last/lastal diff --git a/tests/modules/last/lastdb/main.nf b/tests/modules/last/lastdb/main.nf index 2f11bee4..d1c7b79a 100644 --- a/tests/modules/last/lastdb/main.nf +++ b/tests/modules/last/lastdb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_LASTDB } from '../../../../modules/last/lastdb/main.nf' addParams( options: ['args': '-Q0'] ) +include { LAST_LASTDB } from '../../../../modules/last/lastdb/main.nf' workflow test_last_lastdb { diff --git a/tests/modules/last/lastdb/nextflow.config b/tests/modules/last/lastdb/nextflow.config new file mode 100644 index 00000000..9b8b9878 --- /dev/null +++ b/tests/modules/last/lastdb/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_LASTDB { + ext.args = '-Q0' + } + +} diff --git a/tests/modules/last/lastdb/test.yml b/tests/modules/last/lastdb/test.yml index c69ecfac..ece44cf3 100644 --- a/tests/modules/last/lastdb/test.yml +++ b/tests/modules/last/lastdb/test.yml @@ -1,5 +1,5 @@ - name: last lastdb test_last_lastdb - command: nextflow run tests/modules/last/lastdb -entry test_last_lastdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastdb -entry test_last_lastdb -c ./tests/config/nextflow.config -c ./tests/modules/last/lastdb/nextflow.config tags: - last/lastdb - last @@ -20,7 +20,7 @@ md5sum: b7c40f06b1309dc6f37849eeb86dfd22 - name: last lastdb test_last_lastdb_gzipped_input - command: nextflow run tests/modules/last/lastdb -entry test_last_lastdb_gzipped_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastdb -entry test_last_lastdb_gzipped_input -c 
./tests/config/nextflow.config -c ./tests/modules/last/lastdb/nextflow.config tags: - last/lastdb - last diff --git a/tests/modules/last/mafconvert/main.nf b/tests/modules/last/mafconvert/main.nf index 7864c68a..c87f6e6a 100644 --- a/tests/modules/last/mafconvert/main.nf +++ b/tests/modules/last/mafconvert/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_MAFCONVERT } from '../../../../modules/last/mafconvert/main.nf' addParams( options: [:] ) +include { LAST_MAFCONVERT } from '../../../../modules/last/mafconvert/main.nf' workflow test_last_mafconvert { diff --git a/tests/modules/last/mafconvert/nextflow.config b/tests/modules/last/mafconvert/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/mafconvert/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/mafconvert/test.yml b/tests/modules/last/mafconvert/test.yml index 35c65ce9..86a80f20 100644 --- a/tests/modules/last/mafconvert/test.yml +++ b/tests/modules/last/mafconvert/test.yml @@ -1,5 +1,5 @@ - name: last mafconvert test_last_mafconvert - command: nextflow run tests/modules/last/mafconvert -entry test_last_mafconvert -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/mafconvert -entry test_last_mafconvert -c ./tests/config/nextflow.config -c ./tests/modules/last/mafconvert/nextflow.config tags: - last/mafconvert - last diff --git a/tests/modules/last/mafswap/main.nf b/tests/modules/last/mafswap/main.nf index 3bb72d63..5cc94932 100644 --- a/tests/modules/last/mafswap/main.nf +++ b/tests/modules/last/mafswap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_MAFSWAP } from '../../../../modules/last/mafswap/main.nf' addParams( options: [:] ) +include { LAST_MAFSWAP } from '../../../../modules/last/mafswap/main.nf' workflow test_last_mafswap { diff --git a/tests/modules/last/mafswap/nextflow.config b/tests/modules/last/mafswap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/mafswap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/mafswap/test.yml b/tests/modules/last/mafswap/test.yml index c7e3778d..a0865e00 100644 --- a/tests/modules/last/mafswap/test.yml +++ b/tests/modules/last/mafswap/test.yml @@ -1,5 +1,5 @@ - name: last mafswap test_last_mafswap - command: nextflow run tests/modules/last/mafswap -entry test_last_mafswap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/mafswap -entry test_last_mafswap -c ./tests/config/nextflow.config -c ./tests/modules/last/mafswap/nextflow.config tags: - last - last/mafswap diff --git a/tests/modules/last/postmask/main.nf b/tests/modules/last/postmask/main.nf index c30ac806..9bbb10e9 100644 --- a/tests/modules/last/postmask/main.nf +++ b/tests/modules/last/postmask/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_POSTMASK } from '../../../../modules/last/postmask/main.nf' addParams( options: [suffix:'.postmask'] ) +include { LAST_POSTMASK } from '../../../../modules/last/postmask/main.nf' workflow test_last_postmask { diff --git a/tests/modules/last/postmask/nextflow.config b/tests/modules/last/postmask/nextflow.config new file mode 100644 index 00000000..dc021264 --- /dev/null +++ b/tests/modules/last/postmask/nextflow.config @@ -0,0 
+1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_POSTMASK { + ext.suffix = '.postmask' + } + +} diff --git a/tests/modules/last/postmask/test.yml b/tests/modules/last/postmask/test.yml index 57aea822..81ae7f73 100644 --- a/tests/modules/last/postmask/test.yml +++ b/tests/modules/last/postmask/test.yml @@ -1,5 +1,5 @@ - name: last postmask test_last_postmask - command: nextflow run tests/modules/last/postmask -entry test_last_postmask -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/postmask -entry test_last_postmask -c ./tests/config/nextflow.config -c ./tests/modules/last/postmask/nextflow.config tags: - last - last/postmask diff --git a/tests/modules/last/split/main.nf b/tests/modules/last/split/main.nf index 19d899ab..f4ece4f2 100644 --- a/tests/modules/last/split/main.nf +++ b/tests/modules/last/split/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_SPLIT } from '../../../../modules/last/split/main.nf' addParams( options: ['suffix':'.split'] ) +include { LAST_SPLIT } from '../../../../modules/last/split/main.nf' workflow test_last_split { diff --git a/tests/modules/last/split/nextflow.config b/tests/modules/last/split/nextflow.config new file mode 100644 index 00000000..8b31ca0f --- /dev/null +++ b/tests/modules/last/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_SPLIT { + ext.suffix = '.split' + } + +} diff --git a/tests/modules/last/split/test.yml b/tests/modules/last/split/test.yml index d57d7477..57eb345f 100644 --- a/tests/modules/last/split/test.yml +++ b/tests/modules/last/split/test.yml @@ -1,5 +1,5 @@ - name: last split test_last_split - command: nextflow run tests/modules/last/split -entry test_last_split -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/split -entry test_last_split -c ./tests/config/nextflow.config -c ./tests/modules/last/split/nextflow.config tags: - last - last/split diff --git a/tests/modules/last/train/main.nf b/tests/modules/last/train/main.nf index 26e318c3..0f280a82 100644 --- a/tests/modules/last/train/main.nf +++ b/tests/modules/last/train/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { LAST_TRAIN } from '../../../../modules/last/train/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { LAST_TRAIN } from '../../../../modules/last/train/main.nf' workflow test_last_train { diff --git a/tests/modules/last/train/nextflow.config b/tests/modules/last/train/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/train/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/train/test.yml b/tests/modules/last/train/test.yml index 18eec951..8641600b 100644 --- a/tests/modules/last/train/test.yml +++ b/tests/modules/last/train/test.yml @@ -1,5 +1,5 @@ - name: last train test_last_train - command: nextflow run tests/modules/last/train -entry test_last_train -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/train -entry test_last_train -c ./tests/config/nextflow.config -c ./tests/modules/last/train/nextflow.config tags: - 
last/train - last diff --git a/tests/modules/leehom/main.nf b/tests/modules/leehom/main.nf index 2fe6f12f..1615d2e1 100644 --- a/tests/modules/leehom/main.nf +++ b/tests/modules/leehom/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { LEEHOM } from '../../../modules/leehom/main.nf' addParams( options: [:] ) -include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' addParams( options: [args: "-f4 -b"] ) +include { LEEHOM } from '../../../modules/leehom/main.nf' +include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' workflow test_leehom_bam { diff --git a/tests/modules/leehom/nextflow.config b/tests/modules/leehom/nextflow.config new file mode 100644 index 00000000..25df48cd --- /dev/null +++ b/tests/modules/leehom/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_VIEW { + ext.args = '-f4 -b' + } + +} diff --git a/tests/modules/leehom/test.yml b/tests/modules/leehom/test.yml index 8a9f083e..98257492 100644 --- a/tests/modules/leehom/test.yml +++ b/tests/modules/leehom/test.yml @@ -1,17 +1,15 @@ - name: leehom test_leehom_bam - command: nextflow run tests/modules/leehom -entry test_leehom_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/leehom -entry test_leehom_bam -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config tags: - leehom files: - path: output/leehom/test.bam - md5sum: 19a1bf95714523868791f1d4d3aaee73 + - path: output/samtools/test.bam - path: output/leehom/test.log md5sum: d1f5da273eb69f41babda510797c7671 - - path: output/samtools/test.bam - md5sum: 25d13b3b31b147bb3836dea9932c38dd - name: leehom test_leehom_se_fq - command: nextflow run tests/modules/leehom -entry test_leehom_se_fq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/leehom -entry test_leehom_se_fq -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config tags: - leehom files: @@ -23,7 +21,7 @@ md5sum: 59aa280cb72dfbea05ba913cb89db143 - name: leehom test_leehom_pe_fq - command: nextflow run tests/modules/leehom -entry test_leehom_pe_fq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/leehom -entry test_leehom_pe_fq -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config tags: - leehom files: diff --git a/tests/modules/lima/main.nf b/tests/modules/lima/main.nf index df4b2be2..7501def9 100644 --- a/tests/modules/lima/main.nf +++ b/tests/modules/lima/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LIMA } from '../../../modules/lima/main.nf' addParams( options: [args: '--isoseq --peek-guess', suffix: ".fl"] ) +include { LIMA } from '../../../modules/lima/main.nf' workflow test_lima_bam { diff --git a/tests/modules/lima/nextflow.config b/tests/modules/lima/nextflow.config new file mode 100644 index 00000000..5091b034 --- /dev/null +++ b/tests/modules/lima/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LIMA { + ext.args = '--isoseq --peek-guess' + ext.suffix = '.fl' + } + +} diff --git a/tests/modules/lima/test.yml b/tests/modules/lima/test.yml index 1ff860d9..8d927624 100644 --- a/tests/modules/lima/test.yml +++ b/tests/modules/lima/test.yml @@ -1,5 +1,5 @@ - name: lima test_lima_bam - command: nextflow run tests/modules/lima -entry test_lima_bam -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/lima -entry test_lima_bam -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -23,7 +23,7 @@ md5sum: bcbcaaaca418bdeb91141c81715ca420 - name: lima test_lima_fa - command: nextflow run tests/modules/lima -entry test_lima_fa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fa -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -39,7 +39,7 @@ md5sum: 03be2311ba4afb878d8e547ab38c11eb - name: lima test_lima_fa_gz - command: nextflow run tests/modules/lima -entry test_lima_fa_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fa_gz -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -55,7 +55,7 @@ md5sum: 03be2311ba4afb878d8e547ab38c11eb - name: lima test_lima_fq - command: nextflow run tests/modules/lima -entry test_lima_fq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fq -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -73,7 +73,7 @@ md5sum: e91d3c386aaf4effa63f33ee2eb7da2a - name: lima test_lima_fq_gz - command: nextflow run tests/modules/lima -entry test_lima_fq_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fq_gz -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: diff --git a/tests/modules/lissero/main.nf b/tests/modules/lissero/main.nf index e653bd76..339576c3 100644 --- a/tests/modules/lissero/main.nf +++ b/tests/modules/lissero/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LISSERO } from '../../../modules/lissero/main.nf' addParams( options: [:] ) +include { LISSERO } from '../../../modules/lissero/main.nf' workflow test_lissero { diff --git a/tests/modules/lissero/nextflow.config b/tests/modules/lissero/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lissero/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lissero/test.yml b/tests/modules/lissero/test.yml index 19e79623..8dd7339e 100644 --- a/tests/modules/lissero/test.yml +++ b/tests/modules/lissero/test.yml @@ -1,5 +1,5 @@ - name: lissero test_lissero - command: nextflow run tests/modules/lissero -entry test_lissero -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lissero -entry test_lissero -c ./tests/config/nextflow.config -c ./tests/modules/lissero/nextflow.config tags: - lissero files: diff --git a/tests/modules/lofreq/call/main.nf b/tests/modules/lofreq/call/main.nf index 2c306fd1..70da4ea5 100644 --- a/tests/modules/lofreq/call/main.nf +++ b/tests/modules/lofreq/call/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_CALL } from '../../../../modules/lofreq/call/main.nf' addParams( options: [:] ) +include { LOFREQ_CALL } from '../../../../modules/lofreq/call/main.nf' workflow test_lofreq_call { diff --git a/tests/modules/lofreq/call/nextflow.config b/tests/modules/lofreq/call/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/call/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/call/test.yml 
b/tests/modules/lofreq/call/test.yml index 88700bfe..b9f42542 100644 --- a/tests/modules/lofreq/call/test.yml +++ b/tests/modules/lofreq/call/test.yml @@ -1,5 +1,5 @@ - name: lofreq call test_lofreq_call - command: nextflow run tests/modules/lofreq/call -entry test_lofreq_call -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/call -entry test_lofreq_call -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/call/nextflow.config tags: - lofreq - lofreq/call diff --git a/tests/modules/lofreq/callparallel/main.nf b/tests/modules/lofreq/callparallel/main.nf index 724bbff1..24ab2db3 100644 --- a/tests/modules/lofreq/callparallel/main.nf +++ b/tests/modules/lofreq/callparallel/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_CALLPARALLEL } from '../../../../modules/lofreq/callparallel/main.nf' addParams( options: [:] ) +include { LOFREQ_CALLPARALLEL } from '../../../../modules/lofreq/callparallel/main.nf' workflow test_lofreq_callparallel { diff --git a/tests/modules/lofreq/callparallel/nextflow.config b/tests/modules/lofreq/callparallel/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/callparallel/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/callparallel/test.yml b/tests/modules/lofreq/callparallel/test.yml index e09f68c3..db281012 100644 --- a/tests/modules/lofreq/callparallel/test.yml +++ b/tests/modules/lofreq/callparallel/test.yml @@ -1,5 +1,5 @@ - name: lofreq callparallel test_lofreq_callparallel - command: nextflow run tests/modules/lofreq/callparallel -entry test_lofreq_callparallel -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/callparallel -entry test_lofreq_callparallel -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/callparallel/nextflow.config tags: - lofreq/callparallel - lofreq diff --git a/tests/modules/lofreq/filter/main.nf b/tests/modules/lofreq/filter/main.nf index c5dcea97..bd2a7f54 100644 --- a/tests/modules/lofreq/filter/main.nf +++ b/tests/modules/lofreq/filter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_FILTER } from '../../../../modules/lofreq/filter/main.nf' addParams( options: [:] ) +include { LOFREQ_FILTER } from '../../../../modules/lofreq/filter/main.nf' workflow test_lofreq_filter { diff --git a/tests/modules/lofreq/filter/nextflow.config b/tests/modules/lofreq/filter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/filter/test.yml b/tests/modules/lofreq/filter/test.yml index 4ee82654..d3ee3812 100644 --- a/tests/modules/lofreq/filter/test.yml +++ b/tests/modules/lofreq/filter/test.yml @@ -1,5 +1,5 @@ - name: lofreq filter test_lofreq_filter - command: nextflow run tests/modules/lofreq/filter -entry test_lofreq_filter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/filter -entry test_lofreq_filter -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/filter/nextflow.config tags: - lofreq - lofreq/filter diff --git a/tests/modules/lofreq/indelqual/main.nf b/tests/modules/lofreq/indelqual/main.nf index ba0493dd..71652ce1 100644 --- a/tests/modules/lofreq/indelqual/main.nf +++ 
b/tests/modules/lofreq/indelqual/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_INDELQUAL } from '../../../../modules/lofreq/indelqual/main.nf' addParams( options: [ 'args': '--dindel', 'suffix':'.indelqual'] ) +include { LOFREQ_INDELQUAL } from '../../../../modules/lofreq/indelqual/main.nf' workflow test_lofreq_indelqual { diff --git a/tests/modules/lofreq/indelqual/nextflow.config b/tests/modules/lofreq/indelqual/nextflow.config new file mode 100644 index 00000000..b9ad2787 --- /dev/null +++ b/tests/modules/lofreq/indelqual/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LOFREQ_INDELQUAL { + ext.args = '--dindel' + ext.suffix = '.indelqual' + } + +} diff --git a/tests/modules/lofreq/indelqual/test.yml b/tests/modules/lofreq/indelqual/test.yml index f3e73297..6fffb523 100644 --- a/tests/modules/lofreq/indelqual/test.yml +++ b/tests/modules/lofreq/indelqual/test.yml @@ -1,5 +1,5 @@ - name: lofreq indelqual - command: nextflow run ./tests/modules/lofreq/indelqual -entry test_lofreq_indelqual -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/indelqual -entry test_lofreq_indelqual -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/indelqual/nextflow.config tags: - lofreq - lofreq/indelqual diff --git a/tests/modules/macs2/callpeak/main.nf b/tests/modules/macs2/callpeak/main.nf index db598564..070469dd 100644 --- a/tests/modules/macs2/callpeak/main.nf +++ b/tests/modules/macs2/callpeak/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { MACS2_CALLPEAK } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--qval 0.1"] ) -include { MACS2_CALLPEAK as MACS2_CALLPEAK_CTRL } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--qval 0.1"] ) -include { MACS2_CALLPEAK as MACS2_CALLPEAK_BED } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--format BED --qval 1 --nomodel --extsize 200"] ) +include { MACS2_CALLPEAK } from '../../../../modules/macs2/callpeak/main.nf' +include { MACS2_CALLPEAK as MACS2_CALLPEAK_CTRL } from '../../../../modules/macs2/callpeak/main.nf' +include { MACS2_CALLPEAK as MACS2_CALLPEAK_BED } from '../../../../modules/macs2/callpeak/main.nf' workflow test_macs2_callpeak_bed { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/macs2/callpeak/nextflow.config b/tests/modules/macs2/callpeak/nextflow.config new file mode 100644 index 00000000..e3bd3f5d --- /dev/null +++ b/tests/modules/macs2/callpeak/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MACS2_CALLPEAK { + ext.args = '--qval 0.1' + } + + withName: MACS2_CALLPEAK_CTRL { + ext.args = '--qval 0.1' + } + + withName: MACS2_CALLPEAK_BED { + ext.args = '--format BED --qval 1 --nomodel --extsize 200' + } + +} diff --git a/tests/modules/macs2/callpeak/test.yml b/tests/modules/macs2/callpeak/test.yml index 424a9746..43c99140 100644 --- a/tests/modules/macs2/callpeak/test.yml +++ b/tests/modules/macs2/callpeak/test.yml @@ -1,5 +1,5 @@ - name: macs2 callpeak test_macs2_callpeak_bed - command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak_bed -c ./tests/config/nextflow.config -c 
./tests/modules/macs2/callpeak/nextflow.config tags: - macs2 - macs2/callpeak @@ -12,7 +12,7 @@ md5sum: d41d8cd98f00b204e9800998ecf8427e - name: macs2 callpeak test_macs2_callpeak - command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak -c ./tests/config/nextflow.config -c ./tests/modules/macs2/callpeak/nextflow.config tags: - macs2 - macs2/callpeak @@ -25,7 +25,7 @@ md5sum: 26f0f97b6c14dbca129e947a58067c82 - name: macs2 callpeak test_macs2_callpeak_ctrl - command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak_ctrl -c tests/config/nextflow.config + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak_ctrl -c ./tests/config/nextflow.config -c ./tests/modules/macs2/callpeak/nextflow.config tags: - macs2 - macs2/callpeak diff --git a/tests/modules/malt/build_test/main.nf b/tests/modules/malt/build_test/main.nf index b2f3eaf6..2542da0c 100644 --- a/tests/modules/malt/build_test/main.nf +++ b/tests/modules/malt/build_test/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNZIP } from '../../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' workflow test_malt_build { fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/malt/build_test/nextflow.config b/tests/modules/malt/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/malt/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/malt/build_test/test.yml b/tests/modules/malt/build_test/test.yml index c3ed4b8f..c6694ad5 100644 --- a/tests/modules/malt/build_test/test.yml +++ b/tests/modules/malt/build_test/test.yml @@ -1,5 +1,5 @@ - name: malt build - command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build -c ./tests/config/nextflow.config -c ./tests/modules/malt/build/nextflow.config tags: - malt - malt/build @@ -21,7 +21,7 @@ - path: output/malt/malt_index/taxonomy.tre md5sum: bde26a1fff5c63d3046d3863607a1e70 - name: malt build gff - command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build_gff -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build_gff -c ./tests/config/nextflow.config -c ./tests/modules/malt/build/nextflow.config tags: - malt - malt/build diff --git a/tests/modules/malt/run/main.nf b/tests/modules/malt/run/main.nf index 6292ca61..292a3fcf 100644 --- a/tests/modules/malt/run/main.nf +++ b/tests/modules/malt/run/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { UNZIP } from '../../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } from '../../../../modules/malt/run/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' 
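The changes above all follow one pattern: addParams( options: ... ) is dropped from the test includes, and the tool arguments move into a per-test nextflow.config as ext.args / ext.suffix. This only works if the module process itself reads those values from task.ext.* instead of params.options. Below is a minimal sketch of the consuming side, assuming the nf-core task.ext convention; the MYTOOL process and its command line are hypothetical, not a module from this repository:

process MYTOOL {
    input:
    tuple val(meta), path(reads)

    output:
    tuple val(meta), path("*.out"), emit: out

    script:
    def args   = task.ext.args   ?: ''                                            // receives ext.args set via withName in the test nextflow.config
    def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}"  // ext.suffix only changes output file naming
    """
    mytool $args $reads > ${prefix}.out
    """
}

With the module written this way, a selector such as withName: LIMA { ext.args = '--isoseq --peek-guess' } in the test config reaches the process directly, which is why none of the include statements need options any more.
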
+include { MALT_RUN } from '../../../../modules/malt/run/main.nf' workflow test_malt_run { diff --git a/tests/modules/malt/run/nextflow.config b/tests/modules/malt/run/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/malt/run/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/malt/run/test.yml b/tests/modules/malt/run/test.yml index 0c245f2f..5b0742e4 100644 --- a/tests/modules/malt/run/test.yml +++ b/tests/modules/malt/run/test.yml @@ -1,5 +1,5 @@ - name: malt run - command: nextflow run ./tests/modules/malt/run -entry test_malt_run -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/run -entry test_malt_run -c ./tests/config/nextflow.config -c ./tests/modules/malt/run/nextflow.config tags: - malt - malt/run diff --git a/tests/modules/maltextract/main.nf b/tests/modules/maltextract/main.nf index d18923ca..8e0a2241 100644 --- a/tests/modules/maltextract/main.nf +++ b/tests/modules/maltextract/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } from '../../../modules/malt/run/main.nf' addParams( options: [:] ) -include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' addParams( options: [:] ) +include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' +include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../modules/malt/run/main.nf' +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' workflow test_maltextract { diff --git a/tests/modules/maltextract/nextflow.config b/tests/modules/maltextract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/maltextract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/maltextract/test.yml b/tests/modules/maltextract/test.yml index 87bf0182..2440c100 100644 --- a/tests/modules/maltextract/test.yml +++ b/tests/modules/maltextract/test.yml @@ -1,5 +1,5 @@ - name: maltextract - command: nextflow run ./tests/modules/maltextract -entry test_maltextract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/maltextract -entry test_maltextract -c ./tests/config/nextflow.config -c ./tests/modules/maltextract/nextflow.config tags: - maltextract files: diff --git a/tests/modules/manta/germline/main.nf b/tests/modules/manta/germline/main.nf index df996464..f8adedb0 100644 --- a/tests/modules/manta/germline/main.nf +++ b/tests/modules/manta/germline/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' addParams( options: [:] ) +include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' workflow test_manta_germline { input = [ diff --git a/tests/modules/manta/germline/nextflow.config b/tests/modules/manta/germline/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ 
b/tests/modules/manta/germline/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/germline/test.yml b/tests/modules/manta/germline/test.yml index b4086d76..c6ead9eb 100644 --- a/tests/modules/manta/germline/test.yml +++ b/tests/modules/manta/germline/test.yml @@ -1,5 +1,5 @@ - name: manta germline - command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline -c ./tests/config/nextflow.config -c ./tests/modules/manta/germline/nextflow.config tags: - manta - manta/germline @@ -11,7 +11,7 @@ - path: output/manta/test.diploid_sv.vcf.gz - path: output/manta/test.diploid_sv.vcf.gz.tbi - name: manta germline target bed - command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/manta/germline/nextflow.config tags: - manta - manta/germline diff --git a/tests/modules/manta/somatic/main.nf b/tests/modules/manta/somatic/main.nf index 553735c9..7da41bea 100644 --- a/tests/modules/manta/somatic/main.nf +++ b/tests/modules/manta/somatic/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_SOMATIC } from '../../../../modules/manta/somatic/main.nf' addParams( options: [:] ) +include { MANTA_SOMATIC } from '../../../../modules/manta/somatic/main.nf' workflow test_manta_somatic { diff --git a/tests/modules/manta/somatic/nextflow.config b/tests/modules/manta/somatic/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/manta/somatic/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/somatic/test.yml b/tests/modules/manta/somatic/test.yml index 72f0953d..d701a210 100644 --- a/tests/modules/manta/somatic/test.yml +++ b/tests/modules/manta/somatic/test.yml @@ -1,5 +1,5 @@ - name: manta somatic test_manta_somatic - command: nextflow run tests/modules/manta/somatic -entry test_manta_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/somatic -entry test_manta_somatic -c ./tests/config/nextflow.config -c ./tests/modules/manta/somatic/nextflow.config tags: - manta/somatic - manta diff --git a/tests/modules/manta/tumoronly/main.nf b/tests/modules/manta/tumoronly/main.nf index 436ab781..be0d3dbb 100644 --- a/tests/modules/manta/tumoronly/main.nf +++ b/tests/modules/manta/tumoronly/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_TUMORONLY } from '../../../../modules/manta/tumoronly/main.nf' addParams( options: [:] ) +include { MANTA_TUMORONLY } from '../../../../modules/manta/tumoronly/main.nf' workflow test_manta_tumoronly { input = [ diff --git a/tests/modules/manta/tumoronly/nextflow.config b/tests/modules/manta/tumoronly/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/manta/tumoronly/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/tumoronly/test.yml b/tests/modules/manta/tumoronly/test.yml index 13f2cde1..c56e23fa 100644 --- 
a/tests/modules/manta/tumoronly/test.yml +++ b/tests/modules/manta/tumoronly/test.yml @@ -1,5 +1,5 @@ - name: manta tumoronly - command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly -c ./tests/config/nextflow.config -c ./tests/modules/manta/tumoronly/nextflow.config tags: - manta - manta/tumoronly @@ -11,7 +11,7 @@ - path: output/manta/test.tumor_sv.vcf.gz - path: output/manta/test.tumor_sv.vcf.gz.tbi - name: manta tumoronly target bed - command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/manta/tumoronly/nextflow.config tags: - manta - manta/tumoronly diff --git a/tests/modules/mapdamage2/main.nf b/tests/modules/mapdamage2/main.nf index a4a0eb02..b7e4d23b 100644 --- a/tests/modules/mapdamage2/main.nf +++ b/tests/modules/mapdamage2/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MAPDAMAGE2 } from '../../../modules/mapdamage2/main.nf' addParams( options: [:] ) +include { MAPDAMAGE2 } from '../../../modules/mapdamage2/main.nf' workflow test_mapdamage2 { diff --git a/tests/modules/mapdamage2/nextflow.config b/tests/modules/mapdamage2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mapdamage2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mapdamage2/test.yml b/tests/modules/mapdamage2/test.yml index 657f59b5..96c8b2da 100644 --- a/tests/modules/mapdamage2/test.yml +++ b/tests/modules/mapdamage2/test.yml @@ -1,5 +1,5 @@ - name: mapdamage2 test_mapdamage2 - command: nextflow run tests/modules/mapdamage2 -entry test_mapdamage2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mapdamage2 -entry test_mapdamage2 -c ./tests/config/nextflow.config -c ./tests/modules/mapdamage2/nextflow.config tags: - mapdamage2 files: diff --git a/tests/modules/mash/sketch/main.nf b/tests/modules/mash/sketch/main.nf index da72d1e3..cec2035b 100644 --- a/tests/modules/mash/sketch/main.nf +++ b/tests/modules/mash/sketch/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MASH_SKETCH } from '../../../../modules/mash/sketch/main.nf' addParams( options: [:] ) +include { MASH_SKETCH } from '../../../../modules/mash/sketch/main.nf' workflow test_mash_sketch { diff --git a/tests/modules/mash/sketch/nextflow.config b/tests/modules/mash/sketch/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mash/sketch/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mash/sketch/test.yml b/tests/modules/mash/sketch/test.yml index 78f4598b..d5039956 100644 --- a/tests/modules/mash/sketch/test.yml +++ b/tests/modules/mash/sketch/test.yml @@ -1,5 +1,5 @@ - name: mash sketch - command: nextflow run ./tests/modules/mash/sketch -entry test_mash_sketch -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mash/sketch -entry test_mash_sketch -c ./tests/config/nextflow.config -c ./tests/modules/mash/sketch/nextflow.config tags: - mash/sketch files: diff --git 
a/tests/modules/mashtree/main.nf b/tests/modules/mashtree/main.nf index 47a7c12a..07f5e561 100644 --- a/tests/modules/mashtree/main.nf +++ b/tests/modules/mashtree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MASHTREE } from '../../../modules/mashtree/main.nf' addParams( options: [:] ) +include { MASHTREE } from '../../../modules/mashtree/main.nf' workflow test_mashtree { diff --git a/tests/modules/mashtree/nextflow.config b/tests/modules/mashtree/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mashtree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mashtree/test.yml b/tests/modules/mashtree/test.yml index 83ff6272..bea9638c 100644 --- a/tests/modules/mashtree/test.yml +++ b/tests/modules/mashtree/test.yml @@ -1,5 +1,5 @@ - name: mashtree test_mashtree - command: nextflow run tests/modules/mashtree -entry test_mashtree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mashtree -entry test_mashtree -c ./tests/config/nextflow.config -c ./tests/modules/mashtree/nextflow.config tags: - mashtree files: diff --git a/tests/modules/maxbin2/main.nf b/tests/modules/maxbin2/main.nf index bede2c6a..3df417be 100644 --- a/tests/modules/maxbin2/main.nf +++ b/tests/modules/maxbin2/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MAXBIN2 } from '../../../modules/maxbin2/main.nf' addParams( options: [:] ) +include { MAXBIN2 } from '../../../modules/maxbin2/main.nf' workflow test_maxbin2 { diff --git a/tests/modules/maxbin2/nextflow.config b/tests/modules/maxbin2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/maxbin2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/maxbin2/test.yml b/tests/modules/maxbin2/test.yml index 2721d17a..a8ba98f9 100644 --- a/tests/modules/maxbin2/test.yml +++ b/tests/modules/maxbin2/test.yml @@ -1,5 +1,5 @@ - name: maxbin2 - command: nextflow run ./tests/modules/maxbin2 -entry test_maxbin2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/maxbin2 -entry test_maxbin2 -c ./tests/config/nextflow.config -c ./tests/modules/maxbin2/nextflow.config tags: - maxbin2 files: diff --git a/tests/modules/medaka/main.nf b/tests/modules/medaka/main.nf index 300e086b..75fc135b 100644 --- a/tests/modules/medaka/main.nf +++ b/tests/modules/medaka/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MEDAKA } from '../../../modules/medaka/main.nf' addParams( options: [suffix:'.polished.genome'] ) +include { MEDAKA } from '../../../modules/medaka/main.nf' workflow test_medaka { diff --git a/tests/modules/medaka/nextflow.config b/tests/modules/medaka/nextflow.config new file mode 100644 index 00000000..1f89be62 --- /dev/null +++ b/tests/modules/medaka/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MEDAKA { + ext.suffix = '.polished.genome' + } + +} diff --git a/tests/modules/medaka/test.yml b/tests/modules/medaka/test.yml index 9ce5521e..54146bdc 100644 --- a/tests/modules/medaka/test.yml +++ b/tests/modules/medaka/test.yml @@ -1,5 +1,5 @@ - name: medaka test_medaka - command: nextflow run ./tests/modules/medaka -entry test_medaka -c tests/config/nextflow.config + 
command: nextflow run ./tests/modules/medaka -entry test_medaka -c ./tests/config/nextflow.config -c ./tests/modules/medaka/nextflow.config tags: - medaka files: diff --git a/tests/modules/megahit/main.nf b/tests/modules/megahit/main.nf index dcf07cd6..88acf3e3 100644 --- a/tests/modules/megahit/main.nf +++ b/tests/modules/megahit/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MEGAHIT } from '../../../modules/megahit/main.nf' addParams( options: [:] ) +include { MEGAHIT } from '../../../modules/megahit/main.nf' workflow test_megahit { diff --git a/tests/modules/megahit/nextflow.config b/tests/modules/megahit/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/megahit/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/megahit/test.yml b/tests/modules/megahit/test.yml index c390891b..2072ac12 100644 --- a/tests/modules/megahit/test.yml +++ b/tests/modules/megahit/test.yml @@ -1,5 +1,5 @@ - name: megahit - command: nextflow run ./tests/modules/megahit -entry test_megahit -c tests/config/nextflow.config -process.cpus 1 + command: nextflow run ./tests/modules/megahit -entry test_megahit -c ./tests/config/nextflow.config -process.cpus 1 -c ./tests/modules/megahit/nextflow.config tags: - megahit files: @@ -31,7 +31,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: megahit_single - command: nextflow run ./tests/modules/megahit -entry test_megahit_single -c tests/config/nextflow.config -process.cpus 1 + command: nextflow run ./tests/modules/megahit -entry test_megahit_single -c ./tests/config/nextflow.config -process.cpus 1 -c ./tests/modules/megahit/nextflow.config tags: - megahit files: diff --git a/tests/modules/meningotype/main.nf b/tests/modules/meningotype/main.nf index d660ec72..a2d0ff10 100644 --- a/tests/modules/meningotype/main.nf +++ b/tests/modules/meningotype/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MENINGOTYPE } from '../../../modules/meningotype/main.nf' addParams( options: [:] ) +include { MENINGOTYPE } from '../../../modules/meningotype/main.nf' workflow test_meningotype { diff --git a/tests/modules/meningotype/nextflow.config b/tests/modules/meningotype/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/meningotype/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/meningotype/test.yml b/tests/modules/meningotype/test.yml index c61e78a6..02ec8e1f 100644 --- a/tests/modules/meningotype/test.yml +++ b/tests/modules/meningotype/test.yml @@ -1,5 +1,5 @@ - name: meningotype test_meningotype - command: nextflow run tests/modules/meningotype -entry test_meningotype -c tests/config/nextflow.config + command: nextflow run ./tests/modules/meningotype -entry test_meningotype -c ./tests/config/nextflow.config -c ./tests/modules/meningotype/nextflow.config tags: - meningotype files: diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf index 2cfc2e2c..00309402 100644 --- a/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from 
'../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' workflow test_metabat2_jgisummarizebamcontigdepths { diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config b/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml index d318c6d4..86c49d26 100644 --- a/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml @@ -1,5 +1,5 @@ - name: metabat2 jgisummarizebamcontigdepths test_metabat2_jgisummarizebamcontigdepths - command: nextflow run tests/modules/metabat2/jgisummarizebamcontigdepths -entry test_metabat2_jgisummarizebamcontigdepths -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metabat2/jgisummarizebamcontigdepths -entry test_metabat2_jgisummarizebamcontigdepths -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config tags: - metabat2/jgisummarizebamcontigdepths - metabat2 diff --git a/tests/modules/metabat2/metabat2/main.nf b/tests/modules/metabat2/metabat2/main.nf index 3d01f194..0179e4c3 100644 --- a/tests/modules/metabat2/metabat2/main.nf +++ b/tests/modules/metabat2/metabat2/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' workflow test_metabat2_no_depth { diff --git a/tests/modules/metabat2/metabat2/nextflow.config b/tests/modules/metabat2/metabat2/nextflow.config new file mode 100644 index 00000000..83754d8b --- /dev/null +++ b/tests/modules/metabat2/metabat2/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + +} diff --git a/tests/modules/metabat2/metabat2/test.yml b/tests/modules/metabat2/metabat2/test.yml index 7b3435b7..1a8660a7 100644 --- a/tests/modules/metabat2/metabat2/test.yml +++ b/tests/modules/metabat2/metabat2/test.yml @@ -1,5 +1,5 @@ - name: metabat2 metabat2 test_metabat2_no_depth - command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config tags: - metabat2/metabat2 - metabat2 @@ -10,7 +10,7 @@ md5sum: ea77e8c4426d2337419905b57f1ec335 - name: metabat2 metabat2 
test_metabat2_depth - command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config tags: - metabat2/metabat2 - metabat2 diff --git a/tests/modules/metaphlan3/main.nf b/tests/modules/metaphlan3/main.nf index 2d855683..3354d2d9 100644 --- a/tests/modules/metaphlan3/main.nf +++ b/tests/modules/metaphlan3/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../modules/untar/main.nf' addParams( options: [:] ) -include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' addParams( options: ['suffix': '.sam'] ) -include { METAPHLAN3 } from '../../../modules/metaphlan3/main.nf' addParams( options: [ 'args':'--index mpa_v30_CHOCOPhlAn_201901 --add_viruses --bt2_ps very-sensitive-local' ] ) +include { UNTAR } from '../../../modules/untar/main.nf' +include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' +include { METAPHLAN3 } from '../../../modules/metaphlan3/main.nf' workflow test_metaphlan3_single_end { @@ -42,7 +42,7 @@ workflow test_metaphlan3_sam { UNTAR ( db ) - SAMTOOLS_VIEW ( input ) + SAMTOOLS_VIEW ( input, [] ) METAPHLAN3 ( SAMTOOLS_VIEW.out.bam, UNTAR.out.untar ) } diff --git a/tests/modules/metaphlan3/nextflow.config b/tests/modules/metaphlan3/nextflow.config new file mode 100644 index 00000000..2dde2212 --- /dev/null +++ b/tests/modules/metaphlan3/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_VIEW { + ext.suffix = '.sam' + } + + withName: METAPHLAN3 { + ext.args = '--index mpa_v30_CHOCOPhlAn_201901 --add_viruses --bt2_ps very-sensitive-local' + } + +} diff --git a/tests/modules/metaphlan3/test.yml b/tests/modules/metaphlan3/test.yml index fbd5e70b..92e731d2 100644 --- a/tests/modules/metaphlan3/test.yml +++ b/tests/modules/metaphlan3/test.yml @@ -1,5 +1,5 @@ - name: metaphlan3 test_metaphlan3_single_end - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_single_end -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -30,7 +30,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_paired_end - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -61,7 +61,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_sam - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_sam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_sam -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -92,7 +92,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_fasta - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_fasta -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_fasta -c 
./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: diff --git a/tests/modules/methyldackel/extract/main.nf b/tests/modules/methyldackel/extract/main.nf index 40e87b0b..92f92308 100644 --- a/tests/modules/methyldackel/extract/main.nf +++ b/tests/modules/methyldackel/extract/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { METHYLDACKEL_EXTRACT } from '../../../../modules/methyldackel/extract/main.nf' addParams( options: [:] ) +include { METHYLDACKEL_EXTRACT } from '../../../../modules/methyldackel/extract/main.nf' workflow test_methyldackel_extract { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/methyldackel/extract/nextflow.config b/tests/modules/methyldackel/extract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/methyldackel/extract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/methyldackel/extract/test.yml b/tests/modules/methyldackel/extract/test.yml index 70c371d7..28f969f3 100644 --- a/tests/modules/methyldackel/extract/test.yml +++ b/tests/modules/methyldackel/extract/test.yml @@ -1,5 +1,5 @@ - name: methyldackel extract - command: nextflow run ./tests/modules/methyldackel/extract -entry test_methyldackel_extract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/methyldackel/extract -entry test_methyldackel_extract -c ./tests/config/nextflow.config -c ./tests/modules/methyldackel/extract/nextflow.config tags: - methyldackel - methyldackel/extract diff --git a/tests/modules/methyldackel/mbias/main.nf b/tests/modules/methyldackel/mbias/main.nf index 318dd663..f304e22f 100644 --- a/tests/modules/methyldackel/mbias/main.nf +++ b/tests/modules/methyldackel/mbias/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { METHYLDACKEL_MBIAS } from '../../../../modules/methyldackel/mbias/main.nf' addParams( options: [:] ) +include { METHYLDACKEL_MBIAS } from '../../../../modules/methyldackel/mbias/main.nf' workflow test_methyldackel_mbias { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/methyldackel/mbias/nextflow.config b/tests/modules/methyldackel/mbias/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/methyldackel/mbias/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/methyldackel/mbias/test.yml b/tests/modules/methyldackel/mbias/test.yml index 43074291..8bb23f24 100644 --- a/tests/modules/methyldackel/mbias/test.yml +++ b/tests/modules/methyldackel/mbias/test.yml @@ -1,5 +1,5 @@ - name: methyldackel mbias - command: nextflow run ./tests/modules/methyldackel/mbias -entry test_methyldackel_mbias -c tests/config/nextflow.config + command: nextflow run ./tests/modules/methyldackel/mbias -entry test_methyldackel_mbias -c ./tests/config/nextflow.config -c ./tests/modules/methyldackel/mbias/nextflow.config tags: - methyldackel - methyldackel/mbias diff --git a/tests/modules/minia/main.nf b/tests/modules/minia/main.nf index e23f5cc4..5be4d17f 100644 --- a/tests/modules/minia/main.nf +++ b/tests/modules/minia/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIA } from '../../../modules/minia/main.nf' addParams( options: [:] ) +include { MINIA } from 
'../../../modules/minia/main.nf' workflow test_minia { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/minia/nextflow.config b/tests/modules/minia/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minia/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/minia/test.yml b/tests/modules/minia/test.yml index 6836f51d..78b84f37 100644 --- a/tests/modules/minia/test.yml +++ b/tests/modules/minia/test.yml @@ -1,5 +1,5 @@ - name: minia - command: nextflow run tests/modules/minia -entry test_minia -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minia -entry test_minia -c ./tests/config/nextflow.config -c ./tests/modules/minia/nextflow.config tags: - minia files: diff --git a/tests/modules/miniasm/main.nf b/tests/modules/miniasm/main.nf index f3d23d56..949660ac 100644 --- a/tests/modules/miniasm/main.nf +++ b/tests/modules/miniasm/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIASM } from '../../../modules/miniasm/main.nf' addParams( options: [suffix:'.assembly'] ) +include { MINIASM } from '../../../modules/miniasm/main.nf' workflow test_miniasm { diff --git a/tests/modules/miniasm/nextflow.config b/tests/modules/miniasm/nextflow.config new file mode 100644 index 00000000..844a0120 --- /dev/null +++ b/tests/modules/miniasm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MINIASM { + ext.suffix = '.assembly' + } + +} diff --git a/tests/modules/miniasm/test.yml b/tests/modules/miniasm/test.yml index 7596a269..0bdc350a 100644 --- a/tests/modules/miniasm/test.yml +++ b/tests/modules/miniasm/test.yml @@ -1,5 +1,5 @@ - name: miniasm test_miniasm - command: nextflow run tests/modules/miniasm -entry test_miniasm -c tests/config/nextflow.config + command: nextflow run ./tests/modules/miniasm -entry test_miniasm -c ./tests/config/nextflow.config -c ./tests/modules/miniasm/nextflow.config tags: - miniasm files: diff --git a/tests/modules/minimap2/align/main.nf b/tests/modules/minimap2/align/main.nf index b4dbf5bd..e507d3e5 100644 --- a/tests/modules/minimap2/align/main.nf +++ b/tests/modules/minimap2/align/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIMAP2_ALIGN } from '../../../../modules/minimap2/align/main.nf' addParams( options: [:] ) +include { MINIMAP2_ALIGN } from '../../../../modules/minimap2/align/main.nf' workflow test_minimap2_align_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/minimap2/align/nextflow.config b/tests/modules/minimap2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minimap2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/minimap2/align/test.yml b/tests/modules/minimap2/align/test.yml index 3309bf4b..598a5d25 100644 --- a/tests/modules/minimap2/align/test.yml +++ b/tests/modules/minimap2/align/test.yml @@ -1,5 +1,5 @@ - name: minimap2 align single-end - command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_single_end -c 
./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config tags: - minimap2 - minimap2/align @@ -8,7 +8,7 @@ md5sum: 70e8cf299ee3ecd33e629d10c1f588ce - name: minimap2 align paired-end - command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config tags: - minimap2 - minimap2/align diff --git a/tests/modules/minimap2/index/main.nf b/tests/modules/minimap2/index/main.nf index 39aa93e0..a69efa85 100644 --- a/tests/modules/minimap2/index/main.nf +++ b/tests/modules/minimap2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIMAP2_INDEX } from '../../../../modules/minimap2/index/main.nf' addParams( options: [:] ) +include { MINIMAP2_INDEX } from '../../../../modules/minimap2/index/main.nf' workflow test_minimap2_index { diff --git a/tests/modules/minimap2/index/nextflow.config b/tests/modules/minimap2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minimap2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/minimap2/index/test.yml b/tests/modules/minimap2/index/test.yml index 7a3cc8fa..95700452 100644 --- a/tests/modules/minimap2/index/test.yml +++ b/tests/modules/minimap2/index/test.yml @@ -1,5 +1,5 @@ - name: minimap2 index - command: nextflow run ./tests/modules/minimap2/index -entry test_minimap2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/index -entry test_minimap2_index -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/index/nextflow.config tags: - minimap2 - minimap2/index diff --git a/tests/modules/mlst/main.nf b/tests/modules/mlst/main.nf index 4b7d44be..f84ec622 100644 --- a/tests/modules/mlst/main.nf +++ b/tests/modules/mlst/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MLST } from '../../../modules/mlst/main.nf' addParams( options: [:] ) +include { MLST } from '../../../modules/mlst/main.nf' workflow test_mlst { diff --git a/tests/modules/mlst/nextflow.config b/tests/modules/mlst/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mlst/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mlst/test.yml b/tests/modules/mlst/test.yml index 5a7c7a0e..53eacc5a 100644 --- a/tests/modules/mlst/test.yml +++ b/tests/modules/mlst/test.yml @@ -1,5 +1,5 @@ - name: mlst test_mlst - command: nextflow run tests/modules/mlst -entry test_mlst -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mlst -entry test_mlst -c ./tests/config/nextflow.config -c ./tests/modules/mlst/nextflow.config tags: - mlst files: diff --git a/tests/modules/mosdepth/main.nf b/tests/modules/mosdepth/main.nf index c4d8e9c4..8862204d 100644 --- a/tests/modules/mosdepth/main.nf +++ b/tests/modules/mosdepth/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MOSDEPTH } from '../../../modules/mosdepth/main.nf' addParams( options: [:] ) +include { MOSDEPTH } from '../../../modules/mosdepth/main.nf' workflow test_mosdepth { input = [ [ id:'test', single_end:true ], diff --git 
a/tests/modules/mosdepth/nextflow.config b/tests/modules/mosdepth/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mosdepth/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mosdepth/test.yml b/tests/modules/mosdepth/test.yml index f5ab5608..e264ef3b 100644 --- a/tests/modules/mosdepth/test.yml +++ b/tests/modules/mosdepth/test.yml @@ -1,5 +1,5 @@ - name: mosdepth - command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config tags: - mosdepth files: diff --git a/tests/modules/msisensor/msi/main.nf b/tests/modules/msisensor/msi/main.nf index f8ce4187..259ec887 100644 --- a/tests/modules/msisensor/msi/main.nf +++ b/tests/modules/msisensor/msi/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' addParams( options: [:] ) -include { MSISENSOR_MSI } from '../../../../modules/msisensor/msi/main.nf' addParams( options: [:] ) +include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' +include { MSISENSOR_MSI } from '../../../../modules/msisensor/msi/main.nf' workflow test_msisensor_msi { diff --git a/tests/modules/msisensor/msi/nextflow.config b/tests/modules/msisensor/msi/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/msisensor/msi/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/msisensor/msi/test.yml b/tests/modules/msisensor/msi/test.yml index 1fc74ad3..0d0da1ee 100644 --- a/tests/modules/msisensor/msi/test.yml +++ b/tests/modules/msisensor/msi/test.yml @@ -1,5 +1,5 @@ - name: msisensor msi - command: nextflow run ./tests/modules/msisensor/msi -entry test_msisensor_msi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/msisensor/msi -entry test_msisensor_msi -c ./tests/config/nextflow.config -c ./tests/modules/msisensor/msi/nextflow.config tags: - msisensor - msisensor/msi diff --git a/tests/modules/msisensor/scan/main.nf b/tests/modules/msisensor/scan/main.nf index 2303d0b9..de46dd9b 100644 --- a/tests/modules/msisensor/scan/main.nf +++ b/tests/modules/msisensor/scan/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' addParams( options: [:] ) +include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' workflow test_msisensor_scan { diff --git a/tests/modules/msisensor/scan/nextflow.config b/tests/modules/msisensor/scan/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/msisensor/scan/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/msisensor/scan/test.yml b/tests/modules/msisensor/scan/test.yml index 0d28c5a2..9e697a59 100644 --- a/tests/modules/msisensor/scan/test.yml +++ b/tests/modules/msisensor/scan/test.yml @@ -1,5 +1,5 @@ - name: msisensor scan - command: nextflow run ./tests/modules/msisensor/scan -entry test_msisensor_scan -c tests/config/nextflow.config + command: nextflow 
run ./tests/modules/msisensor/scan -entry test_msisensor_scan -c ./tests/config/nextflow.config -c ./tests/modules/msisensor/scan/nextflow.config tags: - msisensor - msisensor/scan diff --git a/tests/modules/mtnucratio/main.nf b/tests/modules/mtnucratio/main.nf index dd9fc9db..6d6f5e1d 100644 --- a/tests/modules/mtnucratio/main.nf +++ b/tests/modules/mtnucratio/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MTNUCRATIO } from '../../../modules/mtnucratio/main.nf' addParams( options: [:] ) +include { MTNUCRATIO } from '../../../modules/mtnucratio/main.nf' workflow test_mtnucratio { diff --git a/tests/modules/mtnucratio/nextflow.config b/tests/modules/mtnucratio/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mtnucratio/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mtnucratio/test.yml b/tests/modules/mtnucratio/test.yml index 76cbaf32..24dc3d16 100644 --- a/tests/modules/mtnucratio/test.yml +++ b/tests/modules/mtnucratio/test.yml @@ -1,5 +1,5 @@ - name: mtnucratio - command: nextflow run tests/modules/mtnucratio -entry test_mtnucratio -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mtnucratio -entry test_mtnucratio -c ./tests/config/nextflow.config -c ./tests/modules/mtnucratio/nextflow.config tags: - mtnucratio files: diff --git a/tests/modules/multiqc/main.nf b/tests/modules/multiqc/main.nf index ddabb43a..43643985 100644 --- a/tests/modules/multiqc/main.nf +++ b/tests/modules/multiqc/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { FASTQC } from '../../../modules/fastqc/main.nf' addParams( options: [:] ) -include { MULTIQC } from '../../../modules/multiqc/main.nf' addParams( options: [:] ) +include { FASTQC } from '../../../modules/fastqc/main.nf' +include { MULTIQC } from '../../../modules/multiqc/main.nf' workflow test_multiqc { input = [ [ id: 'test', single_end: false ], diff --git a/tests/modules/multiqc/nextflow.config b/tests/modules/multiqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/multiqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/multiqc/test.yml b/tests/modules/multiqc/test.yml index 69ded5d5..39796872 100644 --- a/tests/modules/multiqc/test.yml +++ b/tests/modules/multiqc/test.yml @@ -1,5 +1,5 @@ - name: multiqc - command: nextflow run ./tests/modules/multiqc -entry test_multiqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/multiqc -entry test_multiqc -c ./tests/config/nextflow.config -c ./tests/modules/multiqc/nextflow.config tags: - multiqc files: diff --git a/tests/modules/mummer/main.nf b/tests/modules/mummer/main.nf index b24f8b16..30c8c4b8 100644 --- a/tests/modules/mummer/main.nf +++ b/tests/modules/mummer/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MUMMER } from '../../../modules/mummer/main.nf' addParams( options: [:] ) +include { MUMMER } from '../../../modules/mummer/main.nf' workflow test_mummer { diff --git a/tests/modules/mummer/nextflow.config b/tests/modules/mummer/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mummer/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } 
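Every generated config sets the same publishDir closure, seen again just above: it takes the fully qualified task name, keeps the last component, then keeps the part before the first underscore, lower-cased, so each tool publishes into its own directory no matter how the process is nested or aliased. A small plain-Groovy illustration; the task name below assumes Nextflow's workflow:PROCESS naming:

def name = 'test_macs2_callpeak_ctrl:MACS2_CALLPEAK_CTRL'   // assumed fully qualified task name
def last = name.tokenize(':')[-1]                           // 'MACS2_CALLPEAK_CTRL' -> drop the workflow prefix
def tool = last.tokenize('_')[0]                            // 'MACS2'               -> drop the aliased part after the tool name
assert tool.toLowerCase() == 'macs2'                        // files land in "${params.outdir}/macs2", matching the output/<tool>/ paths the test.yml files check
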
+ +} diff --git a/tests/modules/mummer/test.yml b/tests/modules/mummer/test.yml index 1d368d14..359fd4ad 100644 --- a/tests/modules/mummer/test.yml +++ b/tests/modules/mummer/test.yml @@ -1,5 +1,5 @@ - name: mummer test_mummer - command: nextflow run tests/modules/mummer -entry test_mummer -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mummer -entry test_mummer -c ./tests/config/nextflow.config -c ./tests/modules/mummer/nextflow.config tags: - mummer files: diff --git a/tests/modules/muscle/main.nf b/tests/modules/muscle/main.nf index 81a71761..a6294519 100644 --- a/tests/modules/muscle/main.nf +++ b/tests/modules/muscle/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { MUSCLE } from '../../../modules/muscle/main.nf' addParams( options: ['args': '-fasta -verbose -phys -phyi -maxiters 2']) -include { MUSCLE as MUSCLE_TREE } from '../../../modules/muscle/main.nf' addParams( options: ['args': '-maketree']) +include { MUSCLE } from '../../../modules/muscle/main.nf' +include { MUSCLE as MUSCLE_TREE } from '../../../modules/muscle/main.nf' workflow test_muscle { diff --git a/tests/modules/muscle/nextflow.config b/tests/modules/muscle/nextflow.config new file mode 100644 index 00000000..31331b0f --- /dev/null +++ b/tests/modules/muscle/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MUSCLE { + ext.args = '-fasta -verbose -phys -phyi -maxiters 2' + } + + withName: MUSCLE_TREE { + ext.args = '-maketree' + } + +} diff --git a/tests/modules/muscle/test.yml b/tests/modules/muscle/test.yml index 7f9d2a54..6995d71d 100644 --- a/tests/modules/muscle/test.yml +++ b/tests/modules/muscle/test.yml @@ -1,5 +1,5 @@ - name: muscle test_muscle - command: nextflow run tests/modules/muscle -entry test_muscle -c tests/config/nextflow.config + command: nextflow run ./tests/modules/muscle -entry test_muscle -c ./tests/config/nextflow.config -c ./tests/modules/muscle/nextflow.config tags: - muscle files: diff --git a/tests/modules/nanolyse/main.nf b/tests/modules/nanolyse/main.nf index 97941a6d..91013cd0 100644 --- a/tests/modules/nanolyse/main.nf +++ b/tests/modules/nanolyse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NANOLYSE } from '../../../modules/nanolyse/main.nf' addParams( options: [suffix: '.clean'] ) +include { NANOLYSE } from '../../../modules/nanolyse/main.nf' workflow test_nanolyse { input = [ diff --git a/tests/modules/nanolyse/nextflow.config b/tests/modules/nanolyse/nextflow.config new file mode 100644 index 00000000..ede080cc --- /dev/null +++ b/tests/modules/nanolyse/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: NANOLYSE { + ext.suffix = '.clean' + } + +} diff --git a/tests/modules/nanolyse/test.yml b/tests/modules/nanolyse/test.yml index 4938fe57..5af2e65e 100644 --- a/tests/modules/nanolyse/test.yml +++ b/tests/modules/nanolyse/test.yml @@ -1,5 +1,5 @@ - name: nanolyse - command: nextflow run ./tests/modules/nanolyse -entry test_nanolyse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanolyse -entry test_nanolyse -c ./tests/config/nextflow.config -c ./tests/modules/nanolyse/nextflow.config tags: - nanolyse files: diff --git a/tests/modules/nanoplot/main.nf b/tests/modules/nanoplot/main.nf index a483f5e2..04c923c2 100644 --- a/tests/modules/nanoplot/main.nf +++ b/tests/modules/nanoplot/main.nf @@ 
-2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NANOPLOT } from '../../../modules/nanoplot/main.nf' addParams( options: [:] ) +include { NANOPLOT } from '../../../modules/nanoplot/main.nf' workflow test_nanoplot_summary { def input = [] diff --git a/tests/modules/nanoplot/nextflow.config b/tests/modules/nanoplot/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nanoplot/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nanoplot/test.yml b/tests/modules/nanoplot/test.yml index 475b90c9..6549953e 100644 --- a/tests/modules/nanoplot/test.yml +++ b/tests/modules/nanoplot/test.yml @@ -1,6 +1,6 @@ - name: nanoplot_summary - command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_summary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_summary -c ./tests/config/nextflow.config -c ./tests/modules/nanoplot/nextflow.config tags: - nanoplot files: @@ -8,7 +8,7 @@ contains: - "report" - name: nanoplot_fastq - command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_fastq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_fastq -c ./tests/config/nextflow.config -c ./tests/modules/nanoplot/nextflow.config tags: - nanoplot files: diff --git a/tests/modules/ncbigenomedownload/main.nf b/tests/modules/ncbigenomedownload/main.nf index f729b91d..2447b97c 100644 --- a/tests/modules/ncbigenomedownload/main.nf +++ b/tests/modules/ncbigenomedownload/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NCBIGENOMEDOWNLOAD } from '../../../modules/ncbigenomedownload/main.nf' addParams( options: [ args: '-A GCF_000013425.1 --formats genbank,fasta,assembly-stats bacteria '] ) +include { NCBIGENOMEDOWNLOAD } from '../../../modules/ncbigenomedownload/main.nf' workflow test_ncbigenomedownload { diff --git a/tests/modules/ncbigenomedownload/nextflow.config b/tests/modules/ncbigenomedownload/nextflow.config new file mode 100644 index 00000000..7e6ccf70 --- /dev/null +++ b/tests/modules/ncbigenomedownload/nextflow.config @@ -0,0 +1,8 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: NCBIGENOMEDOWNLOAD { + ext.args = '-A GCF_000013425.1 --formats genbank,fasta,assembly-stats bacteria' + } +} diff --git a/tests/modules/ncbigenomedownload/test.yml b/tests/modules/ncbigenomedownload/test.yml index 7d1f7c74..8765e04f 100644 --- a/tests/modules/ncbigenomedownload/test.yml +++ b/tests/modules/ncbigenomedownload/test.yml @@ -1,5 +1,5 @@ - name: ncbigenomedownload test_ncbigenomedownload - command: nextflow run tests/modules/ncbigenomedownload -entry test_ncbigenomedownload -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ncbigenomedownload -entry test_ncbigenomedownload -c ./tests/config/nextflow.config -c ./tests/modules/ncbigenomedownload/nextflow.config tags: - ncbigenomedownload files: diff --git a/tests/modules/nextclade/main.nf b/tests/modules/nextclade/main.nf index 93c50ca5..15750990 100755 --- a/tests/modules/nextclade/main.nf +++ b/tests/modules/nextclade/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NEXTCLADE } from '../../../modules/nextclade/main.nf' addParams( options: [:] ) +include { NEXTCLADE } from '../../../modules/nextclade/main.nf' workflow test_nextclade { input = [ diff --git 
a/tests/modules/nextclade/nextflow.config b/tests/modules/nextclade/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nextclade/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nextclade/test.yml b/tests/modules/nextclade/test.yml index 4d1d7743..36218aad 100755 --- a/tests/modules/nextclade/test.yml +++ b/tests/modules/nextclade/test.yml @@ -1,5 +1,5 @@ - name: nextclade test_nextclade - command: nextflow run tests/modules/nextclade -entry test_nextclade -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nextclade -entry test_nextclade -c ./tests/config/nextflow.config -c ./tests/modules/nextclade/nextflow.config tags: - nextclade files: diff --git a/tests/modules/ngmaster/main.nf b/tests/modules/ngmaster/main.nf index 8bc975ed..b23530bc 100644 --- a/tests/modules/ngmaster/main.nf +++ b/tests/modules/ngmaster/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NGMASTER } from '../../../modules/ngmaster/main.nf' addParams( options: [:] ) +include { NGMASTER } from '../../../modules/ngmaster/main.nf' workflow test_ngmaster { diff --git a/tests/modules/ngmaster/nextflow.config b/tests/modules/ngmaster/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ngmaster/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ngmaster/test.yml b/tests/modules/ngmaster/test.yml index 31584a54..fb8dec82 100644 --- a/tests/modules/ngmaster/test.yml +++ b/tests/modules/ngmaster/test.yml @@ -1,5 +1,5 @@ - name: ngmaster test_ngmaster - command: nextflow run tests/modules/ngmaster -entry test_ngmaster -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ngmaster -entry test_ngmaster -c ./tests/config/nextflow.config -c ./tests/modules/ngmaster/nextflow.config tags: - ngmaster files: diff --git a/tests/modules/nucmer/main.nf b/tests/modules/nucmer/main.nf index 8021f577..98e74b07 100644 --- a/tests/modules/nucmer/main.nf +++ b/tests/modules/nucmer/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NUCMER } from '../../../modules/nucmer/main.nf' addParams( options: [:] ) +include { NUCMER } from '../../../modules/nucmer/main.nf' workflow test_nucmer { diff --git a/tests/modules/nucmer/nextflow.config b/tests/modules/nucmer/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nucmer/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nucmer/test.yml b/tests/modules/nucmer/test.yml index 86b3df5d..62caced4 100644 --- a/tests/modules/nucmer/test.yml +++ b/tests/modules/nucmer/test.yml @@ -1,5 +1,5 @@ - name: nucmer test_nucmer - command: nextflow run tests/modules/nucmer -entry test_nucmer -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nucmer -entry test_nucmer -c ./tests/config/nextflow.config -c ./tests/modules/nucmer/nextflow.config tags: - nucmer files: diff --git a/tests/modules/optitype/main.nf b/tests/modules/optitype/main.nf index c27a5c99..55b46f0a 100644 --- a/tests/modules/optitype/main.nf +++ b/tests/modules/optitype/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { OPTITYPE } from 
'../../../modules/optitype/main.nf' addParams( options: ['args':'-e 1 -b 0.009', 'args2':'solver=glpk'] ) +include { OPTITYPE } from '../../../modules/optitype/main.nf' workflow test_optitype { input = [ [ id:'test', seq_type:'dna' ], // meta map diff --git a/tests/modules/optitype/nextflow.config b/tests/modules/optitype/nextflow.config new file mode 100644 index 00000000..14ad9e3f --- /dev/null +++ b/tests/modules/optitype/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: OPTITYPE { + ext.args = '-e 1 -b 0.009' + ext.args2 = 'solver=glpk' + } + +} diff --git a/tests/modules/optitype/test.yml b/tests/modules/optitype/test.yml index 41f35988..7c2ff0d0 100644 --- a/tests/modules/optitype/test.yml +++ b/tests/modules/optitype/test.yml @@ -1,5 +1,5 @@ - name: optitype test_optitype - command: nextflow run tests/modules/optitype -entry test_optitype -c tests/config/nextflow.config + command: nextflow run ./tests/modules/optitype -entry test_optitype -c ./tests/config/nextflow.config -c ./tests/modules/optitype/nextflow.config tags: - optitype files: diff --git a/tests/modules/pairix/main.nf b/tests/modules/pairix/main.nf index f1e2a44a..474bacbb 100644 --- a/tests/modules/pairix/main.nf +++ b/tests/modules/pairix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRIX } from '../../../modules/pairix/main.nf' addParams( options: [:] ) +include { PAIRIX } from '../../../modules/pairix/main.nf' workflow test_pairix { diff --git a/tests/modules/pairix/nextflow.config b/tests/modules/pairix/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pairix/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pairix/test.yml b/tests/modules/pairix/test.yml index 304a94b3..4cd9d37d 100644 --- a/tests/modules/pairix/test.yml +++ b/tests/modules/pairix/test.yml @@ -1,5 +1,5 @@ - name: pairix test_pairix - command: nextflow run tests/modules/pairix -entry test_pairix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairix -entry test_pairix -c ./tests/config/nextflow.config -c ./tests/modules/pairix/nextflow.config tags: - pairix files: diff --git a/tests/modules/pairtools/dedup/main.nf b/tests/modules/pairtools/dedup/main.nf index 2c10c85b..28121526 100644 --- a/tests/modules/pairtools/dedup/main.nf +++ b/tests/modules/pairtools/dedup/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_DEDUP } from '../../../../modules/pairtools/dedup/main.nf' addParams( options: ['suffix':'.dedup'] ) +include { PAIRTOOLS_DEDUP } from '../../../../modules/pairtools/dedup/main.nf' workflow test_pairtools_dedup { diff --git a/tests/modules/pairtools/dedup/nextflow.config b/tests/modules/pairtools/dedup/nextflow.config new file mode 100644 index 00000000..1de3348f --- /dev/null +++ b/tests/modules/pairtools/dedup/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_DEDUP { + ext.suffix = '.dedup' + } + +} diff --git a/tests/modules/pairtools/dedup/test.yml b/tests/modules/pairtools/dedup/test.yml index 25fc51f7..6d7f99f4 100644 --- a/tests/modules/pairtools/dedup/test.yml +++ b/tests/modules/pairtools/dedup/test.yml @@ -1,5 +1,5 @@ - name: pairtools dedup test_pairtools_dedup - 
command: nextflow run tests/modules/pairtools/dedup -entry test_pairtools_dedup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/dedup -entry test_pairtools_dedup -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/dedup/nextflow.config tags: - pairtools/dedup - pairtools diff --git a/tests/modules/pairtools/flip/main.nf b/tests/modules/pairtools/flip/main.nf index ed980102..e4d740e2 100644 --- a/tests/modules/pairtools/flip/main.nf +++ b/tests/modules/pairtools/flip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_FLIP } from '../../../../modules/pairtools/flip/main.nf' addParams( options: [:] ) +include { PAIRTOOLS_FLIP } from '../../../../modules/pairtools/flip/main.nf' workflow test_pairtools_flip { diff --git a/tests/modules/pairtools/flip/nextflow.config b/tests/modules/pairtools/flip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pairtools/flip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pairtools/flip/test.yml b/tests/modules/pairtools/flip/test.yml index eeef6530..cec54976 100644 --- a/tests/modules/pairtools/flip/test.yml +++ b/tests/modules/pairtools/flip/test.yml @@ -1,5 +1,5 @@ - name: pairtools flip test_pairtools_flip - command: nextflow run tests/modules/pairtools/flip -entry test_pairtools_flip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/flip -entry test_pairtools_flip -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/flip/nextflow.config tags: - pairtools/flip - pairtools diff --git a/tests/modules/pairtools/parse/main.nf b/tests/modules/pairtools/parse/main.nf index 26ceaa4f..f006fd6a 100644 --- a/tests/modules/pairtools/parse/main.nf +++ b/tests/modules/pairtools/parse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_PARSE } from '../../../../modules/pairtools/parse/main.nf' addParams( options: ['suffix':'.raw'] ) +include { PAIRTOOLS_PARSE } from '../../../../modules/pairtools/parse/main.nf' workflow test_pairtools_parse { diff --git a/tests/modules/pairtools/parse/nextflow.config b/tests/modules/pairtools/parse/nextflow.config new file mode 100644 index 00000000..1a1182f6 --- /dev/null +++ b/tests/modules/pairtools/parse/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_PARSE { + ext.suffix = '.raw' + } + +} diff --git a/tests/modules/pairtools/parse/test.yml b/tests/modules/pairtools/parse/test.yml index e5d18e01..cf01038c 100644 --- a/tests/modules/pairtools/parse/test.yml +++ b/tests/modules/pairtools/parse/test.yml @@ -1,5 +1,5 @@ - name: pairtools parse test_pairtools_parse - command: nextflow run tests/modules/pairtools/parse -entry test_pairtools_parse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/parse -entry test_pairtools_parse -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/parse/nextflow.config tags: - pairtools - pairtools/parse diff --git a/tests/modules/pairtools/restrict/main.nf b/tests/modules/pairtools/restrict/main.nf index f785ed88..ae7e328b 100644 --- a/tests/modules/pairtools/restrict/main.nf +++ b/tests/modules/pairtools/restrict/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_RESTRICT } from '../../../../modules/pairtools/restrict/main.nf' 
addParams( options: ['suffix':'.restrict'] ) +include { PAIRTOOLS_RESTRICT } from '../../../../modules/pairtools/restrict/main.nf' workflow test_pairtools_restrict { diff --git a/tests/modules/pairtools/restrict/nextflow.config b/tests/modules/pairtools/restrict/nextflow.config new file mode 100644 index 00000000..857d7534 --- /dev/null +++ b/tests/modules/pairtools/restrict/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_RESTRICT { + ext.suffix = '.restrict' + } + +} diff --git a/tests/modules/pairtools/restrict/test.yml b/tests/modules/pairtools/restrict/test.yml index afc64930..484b3739 100644 --- a/tests/modules/pairtools/restrict/test.yml +++ b/tests/modules/pairtools/restrict/test.yml @@ -1,5 +1,5 @@ - name: pairtools restrict test_pairtools_restrict - command: nextflow run tests/modules/pairtools/restrict -entry test_pairtools_restrict -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/restrict -entry test_pairtools_restrict -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/restrict/nextflow.config tags: - pairtools/restrict - pairtools diff --git a/tests/modules/pairtools/select/main.nf b/tests/modules/pairtools/select/main.nf index 2efd29c7..ff65cd95 100644 --- a/tests/modules/pairtools/select/main.nf +++ b/tests/modules/pairtools/select/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_SELECT } from '../../../../modules/pairtools/select/main.nf' addParams( options: [args:"(pair_type == 'RU') or (pair_type == 'UR') or (pair_type == 'UU')"] ) +include { PAIRTOOLS_SELECT } from '../../../../modules/pairtools/select/main.nf' workflow test_pairtools_select { diff --git a/tests/modules/pairtools/select/nextflow.config b/tests/modules/pairtools/select/nextflow.config new file mode 100644 index 00000000..df33cd2e --- /dev/null +++ b/tests/modules/pairtools/select/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_SELECT { + ext.args = "(pair_type == \'RU\') or (pair_type == \'UR\') or (pair_type == \'UU\')" + } + +} diff --git a/tests/modules/pairtools/select/test.yml b/tests/modules/pairtools/select/test.yml index adeb50c3..431e8366 100644 --- a/tests/modules/pairtools/select/test.yml +++ b/tests/modules/pairtools/select/test.yml @@ -1,5 +1,5 @@ - name: pairtools select test_pairtools_select - command: nextflow run tests/modules/pairtools/select -entry test_pairtools_select -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/select -entry test_pairtools_select -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/select/nextflow.config tags: - pairtools/select - pairtools diff --git a/tests/modules/pairtools/sort/main.nf b/tests/modules/pairtools/sort/main.nf index dfb505e0..0e484c76 100644 --- a/tests/modules/pairtools/sort/main.nf +++ b/tests/modules/pairtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_SORT } from '../../../../modules/pairtools/sort/main.nf' addParams( options: ['suffix':'.sorted'] ) +include { PAIRTOOLS_SORT } from '../../../../modules/pairtools/sort/main.nf' workflow test_pairtools_sort { diff --git a/tests/modules/pairtools/sort/nextflow.config b/tests/modules/pairtools/sort/nextflow.config new file mode 100644 index 00000000..86b3d802 --- /dev/null +++ 
b/tests/modules/pairtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_SORT { + ext.suffix = '.sorted' + } + +} diff --git a/tests/modules/pairtools/sort/test.yml b/tests/modules/pairtools/sort/test.yml index 9eea74a0..4d4866aa 100644 --- a/tests/modules/pairtools/sort/test.yml +++ b/tests/modules/pairtools/sort/test.yml @@ -1,5 +1,5 @@ - name: pairtools sort test_pairtools_sort - command: nextflow run tests/modules/pairtools/sort -entry test_pairtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/sort -entry test_pairtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/sort/nextflow.config tags: - pairtools/sort - pairtools diff --git a/tests/modules/pangolin/main.nf b/tests/modules/pangolin/main.nf index b8130c5d..ab4aa4af 100644 --- a/tests/modules/pangolin/main.nf +++ b/tests/modules/pangolin/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PANGOLIN } from '../../../modules/pangolin/main.nf' addParams( options: [:] ) +include { PANGOLIN } from '../../../modules/pangolin/main.nf' workflow test_pangolin { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/pangolin/nextflow.config b/tests/modules/pangolin/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pangolin/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pangolin/test.yml b/tests/modules/pangolin/test.yml index 5fb5e79e..c77e4912 100644 --- a/tests/modules/pangolin/test.yml +++ b/tests/modules/pangolin/test.yml @@ -1,5 +1,5 @@ - name: pangolin - command: nextflow run ./tests/modules/pangolin -entry test_pangolin -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/pangolin -entry test_pangolin -c ./tests/config/nextflow.config -c ./tests/modules/pangolin/nextflow.config tags: - pangolin files: diff --git a/tests/modules/paraclu/main.nf b/tests/modules/paraclu/main.nf index f5101591..3bd75dc0 100644 --- a/tests/modules/paraclu/main.nf +++ b/tests/modules/paraclu/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PARACLU } from '../../../modules/paraclu/main.nf' addParams( options: [:] ) +include { PARACLU } from '../../../modules/paraclu/main.nf' workflow test_paraclu { diff --git a/tests/modules/paraclu/nextflow.config b/tests/modules/paraclu/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/paraclu/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/paraclu/test.yml b/tests/modules/paraclu/test.yml index 3aa3e8b4..36b37df5 100644 --- a/tests/modules/paraclu/test.yml +++ b/tests/modules/paraclu/test.yml @@ -1,5 +1,5 @@ - name: paraclu test_paraclu - command: nextflow run tests/modules/paraclu -entry test_paraclu -c tests/config/nextflow.config + command: nextflow run ./tests/modules/paraclu -entry test_paraclu -c ./tests/config/nextflow.config -c ./tests/modules/paraclu/nextflow.config tags: - paraclu files: diff --git a/tests/modules/pbbam/pbmerge/main.nf b/tests/modules/pbbam/pbmerge/main.nf index 9220af0c..34ed33a6 100644 --- a/tests/modules/pbbam/pbmerge/main.nf +++ b/tests/modules/pbbam/pbmerge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 
-include { PBBAM_PBMERGE } from '../../../../modules/pbbam/pbmerge/main.nf' addParams( options: [suffix: '.merged'] ) +include { PBBAM_PBMERGE } from '../../../../modules/pbbam/pbmerge/main.nf' workflow test_pbbam_pbmerge { diff --git a/tests/modules/pbbam/pbmerge/nextflow.config b/tests/modules/pbbam/pbmerge/nextflow.config new file mode 100644 index 00000000..c897068b --- /dev/null +++ b/tests/modules/pbbam/pbmerge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PBBAM_PBMERGE { + ext.suffix = '.merged' + } + +} diff --git a/tests/modules/pbbam/pbmerge/test.yml b/tests/modules/pbbam/pbmerge/test.yml index 4f334c0e..0a6d7da3 100644 --- a/tests/modules/pbbam/pbmerge/test.yml +++ b/tests/modules/pbbam/pbmerge/test.yml @@ -1,5 +1,5 @@ - name: pbbam pbmerge test_pbbam_pbmerge - command: nextflow run tests/modules/pbbam/pbmerge -entry test_pbbam_pbmerge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pbbam/pbmerge -entry test_pbbam_pbmerge -c ./tests/config/nextflow.config -c ./tests/modules/pbbam/pbmerge/nextflow.config tags: - pbbam/pbmerge - pbbam diff --git a/tests/modules/pbccs/main.nf b/tests/modules/pbccs/main.nf index 74c1b864..91a2ab30 100644 --- a/tests/modules/pbccs/main.nf +++ b/tests/modules/pbccs/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PBCCS } from '../../../modules/pbccs/main.nf' addParams( options: [args:'--min-rq 0.9'] ) +include { PBCCS } from '../../../modules/pbccs/main.nf' workflow test_pbccs { diff --git a/tests/modules/pbccs/nextflow.config b/tests/modules/pbccs/nextflow.config new file mode 100644 index 00000000..869909ce --- /dev/null +++ b/tests/modules/pbccs/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PBCCS { + ext.args = '--min-rq 0.9' + } + +} diff --git a/tests/modules/pbccs/test.yml b/tests/modules/pbccs/test.yml index af225eb1..5d481923 100644 --- a/tests/modules/pbccs/test.yml +++ b/tests/modules/pbccs/test.yml @@ -1,5 +1,5 @@ - name: pbccs test_pbccs - command: nextflow run tests/modules/pbccs -entry test_pbccs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pbccs -entry test_pbccs -c ./tests/config/nextflow.config -c ./tests/modules/pbccs/nextflow.config tags: - pbccs files: diff --git a/tests/modules/peddy/main.nf b/tests/modules/peddy/main.nf index d6331752..e53e8152 100644 --- a/tests/modules/peddy/main.nf +++ b/tests/modules/peddy/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PEDDY } from '../../../modules/peddy/main.nf' addParams( options: [:] ) +include { PEDDY } from '../../../modules/peddy/main.nf' workflow test_peddy { @@ -13,5 +13,5 @@ workflow test_peddy { ] ped = file(params.test_data['homo_sapiens']['genome']['justhusky_ped'], checkIfExists: true) - PEDDY ( input , ped ) + PEDDY ( input, ped ) } diff --git a/tests/modules/peddy/nextflow.config b/tests/modules/peddy/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/peddy/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/peddy/test.yml b/tests/modules/peddy/test.yml index 77bf00f6..0ed6dc94 100644 --- a/tests/modules/peddy/test.yml +++ b/tests/modules/peddy/test.yml @@ -1,5 +1,5 @@ - name: peddy test_peddy - command: 
nextflow run tests/modules/peddy -entry test_peddy -c tests/config/nextflow.config + command: nextflow run ./tests/modules/peddy -entry test_peddy -c ./tests/config/nextflow.config -c ./tests/modules/peddy/nextflow.config tags: - peddy files: diff --git a/tests/modules/phyloflash/main.nf b/tests/modules/phyloflash/main.nf index 754d6747..412e0321 100644 --- a/tests/modules/phyloflash/main.nf +++ b/tests/modules/phyloflash/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PHYLOFLASH } from '../../../modules/phyloflash/main.nf' addParams( options: [:] ) +include { PHYLOFLASH } from '../../../modules/phyloflash/main.nf' process STUB_PHYLOFLASH_DATABASE { output: @@ -19,22 +19,22 @@ workflow test_phyloflash_single_end { STUB_PHYLOFLASH_DATABASE () - - input = [ + + input = [ [ id:'test', single_end:true ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] - PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) + PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) } workflow test_phyloflash_paired_end { STUB_PHYLOFLASH_DATABASE () - input = [ + input = [ [ id:'test', single_end:false ], // meta map - [ + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] diff --git a/tests/modules/phyloflash/nextflow.config b/tests/modules/phyloflash/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/phyloflash/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/phyloflash/test.yml b/tests/modules/phyloflash/test.yml index 0cba41c5..81eac2f2 100644 --- a/tests/modules/phyloflash/test.yml +++ b/tests/modules/phyloflash/test.yml @@ -1,5 +1,5 @@ - name: phyloflash single-end - command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_single_end -c tests/config/nextflow.config -stub-run + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_single_end -c ./tests/config/nextflow.config -c ./tests/modules/phyloflash/nextflow.config -stub-run tags: - phyloflash files: @@ -7,7 +7,7 @@ md5sum: d41d8cd98f00b204e9800998ecf8427e - name: phyloflash paired-end - command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_paired_end -c tests/config/nextflow.config -stub-run + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/phyloflash/nextflow.config -stub-run tags: - phyloflash files: diff --git a/tests/modules/picard/collecthsmetrics/main.nf b/tests/modules/picard/collecthsmetrics/main.nf index 24b031fc..2e8727b5 100644 --- a/tests/modules/picard/collecthsmetrics/main.nf +++ b/tests/modules/picard/collecthsmetrics/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_COLLECTHSMETRICS } from '../../../../modules/picard/collecthsmetrics/main.nf' addParams( options: [:] ) +include { PICARD_COLLECTHSMETRICS } from '../../../../modules/picard/collecthsmetrics/main.nf' workflow test_picard_collecthsmetrics { diff --git a/tests/modules/picard/collecthsmetrics/nextflow.config b/tests/modules/picard/collecthsmetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++
b/tests/modules/picard/collecthsmetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collecthsmetrics/test.yml b/tests/modules/picard/collecthsmetrics/test.yml index 8c610abd..9232d508 100644 --- a/tests/modules/picard/collecthsmetrics/test.yml +++ b/tests/modules/picard/collecthsmetrics/test.yml @@ -1,5 +1,5 @@ - name: picard collecthsmetrics test_picard_collecthsmetrics - command: nextflow run tests/modules/picard/collecthsmetrics -entry test_picard_collecthsmetrics -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/collecthsmetrics -entry test_picard_collecthsmetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collecthsmetrics/nextflow.config tags: - picard - picard/collecthsmetrics diff --git a/tests/modules/picard/collectmultiplemetrics/main.nf b/tests/modules/picard/collectmultiplemetrics/main.nf index 73ac0013..453ecc91 100644 --- a/tests/modules/picard/collectmultiplemetrics/main.nf +++ b/tests/modules/picard/collectmultiplemetrics/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../../modules/picard/collectmultiplemetrics/main.nf' addParams( options: [:] ) +include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../../modules/picard/collectmultiplemetrics/main.nf' workflow test_picard_collectmultiplemetrics { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/collectmultiplemetrics/nextflow.config b/tests/modules/picard/collectmultiplemetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/collectmultiplemetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collectmultiplemetrics/test.yml b/tests/modules/picard/collectmultiplemetrics/test.yml index fc4d0347..8fecca73 100644 --- a/tests/modules/picard/collectmultiplemetrics/test.yml +++ b/tests/modules/picard/collectmultiplemetrics/test.yml @@ -1,5 +1,5 @@ - name: picard collectmultiplemetrics - command: nextflow run ./tests/modules/picard/collectmultiplemetrics -entry test_picard_collectmultiplemetrics -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/collectmultiplemetrics -entry test_picard_collectmultiplemetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collectmultiplemetrics/nextflow.config tags: - picard - picard/collectmultiplemetrics diff --git a/tests/modules/picard/collectwgsmetrics/main.nf b/tests/modules/picard/collectwgsmetrics/main.nf index 5bdf17ab..1d75a2bd 100644 --- a/tests/modules/picard/collectwgsmetrics/main.nf +++ b/tests/modules/picard/collectwgsmetrics/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_COLLECTWGSMETRICS } from '../../../../modules/picard/collectwgsmetrics/main.nf' addParams( options: [:] ) +include { PICARD_COLLECTWGSMETRICS } from '../../../../modules/picard/collectwgsmetrics/main.nf' workflow test_picard_collectwgsmetrics { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/collectwgsmetrics/nextflow.config b/tests/modules/picard/collectwgsmetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/collectwgsmetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collectwgsmetrics/test.yml b/tests/modules/picard/collectwgsmetrics/test.yml index 62e87e65..2daef406 100644 --- a/tests/modules/picard/collectwgsmetrics/test.yml +++ b/tests/modules/picard/collectwgsmetrics/test.yml @@ -1,5 +1,5 @@ - name: picard collectwgsmetrics test_picard_collectwgsmetrics - command: nextflow run tests/modules/picard/collectwgsmetrics -entry test_picard_collectwgsmetrics -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/collectwgsmetrics -entry test_picard_collectwgsmetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collectwgsmetrics/nextflow.config tags: - picard/collectwgsmetrics - picard diff --git a/tests/modules/picard/filtersamreads/main.nf b/tests/modules/picard/filtersamreads/main.nf index a03471dd..847bee57 100644 --- a/tests/modules/picard/filtersamreads/main.nf +++ b/tests/modules/picard/filtersamreads/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] ) -include { PICARD_FILTERSAMREADS } from '../../../../modules/picard/filtersamreads/main.nf' addParams( options: [suffix:'.filtered'] ) +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' +include { PICARD_FILTERSAMREADS } from '../../../../modules/picard/filtersamreads/main.nf' workflow test_picard_filtersamreads { diff --git a/tests/modules/picard/filtersamreads/nextflow.config b/tests/modules/picard/filtersamreads/nextflow.config new file mode 100644 index 00000000..e9ce4914 --- /dev/null +++ b/tests/modules/picard/filtersamreads/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_SORTSAM { + ext.suffix = '.sorted' + } + + withName: PICARD_FILTERSAMREADS { + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/picard/filtersamreads/test.yml b/tests/modules/picard/filtersamreads/test.yml index e8e73ed0..a0ab712b 100644 --- a/tests/modules/picard/filtersamreads/test.yml +++ b/tests/modules/picard/filtersamreads/test.yml @@ -1,5 +1,5 @@ - name: picard filtersamreads - command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads -c ./tests/config/nextflow.config -c ./tests/modules/picard/filtersamreads/nextflow.config tags: - picard - picard/filtersamreads @@ -9,7 +9,7 @@ - name: picard filtersamreads readlist - command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c ./tests/config/nextflow.config -c ./tests/modules/picard/filtersamreads/nextflow.config tags: - picard - picard/filtersamreads diff --git a/tests/modules/picard/markduplicates/main.nf b/tests/modules/picard/markduplicates/main.nf index 7c9c63cd..12f3ac26 100644 --- a/tests/modules/picard/markduplicates/main.nf +++ b/tests/modules/picard/markduplicates/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [:] ) -include { PICARD_MARKDUPLICATES as 
PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [args : 'ASSUME_SORT_ORDER=queryname' ] ) +include { PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' +include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' workflow test_picard_markduplicates_sorted_bam { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/markduplicates/nextflow.config b/tests/modules/picard/markduplicates/nextflow.config new file mode 100644 index 00000000..9178c5b1 --- /dev/null +++ b/tests/modules/picard/markduplicates/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_MARKDUPLICATES_UNSORTED { + ext.args = 'ASSUME_SORT_ORDER=queryname' + } + +} diff --git a/tests/modules/picard/markduplicates/test.yml b/tests/modules/picard/markduplicates/test.yml index 4c314814..beb54009 100644 --- a/tests/modules/picard/markduplicates/test.yml +++ b/tests/modules/picard/markduplicates/test.yml @@ -1,5 +1,5 @@ - name: picard markduplicates sorted bam - command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_sorted_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_sorted_bam -c ./tests/config/nextflow.config -c ./tests/modules/picard/markduplicates/nextflow.config tags: - picard - picard/markduplicates @@ -9,7 +9,7 @@ - "1.0 97 97" - path: ./output/picard/test.bam - name: picard markduplicates unsorted bam - command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c ./tests/config/nextflow.config -c ./tests/modules/picard/markduplicates/nextflow.config tags: - picard - picard/markduplicates diff --git a/tests/modules/picard/mergesamfiles/main.nf b/tests/modules/picard/mergesamfiles/main.nf index 5ddc849f..51c070b6 100644 --- a/tests/modules/picard/mergesamfiles/main.nf +++ b/tests/modules/picard/mergesamfiles/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_MERGESAMFILES } from '../../../../modules/picard/mergesamfiles/main.nf' addParams( options: [:] ) +include { PICARD_MERGESAMFILES } from '../../../../modules/picard/mergesamfiles/main.nf' workflow test_picard_mergesamfiles { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/mergesamfiles/nextflow.config b/tests/modules/picard/mergesamfiles/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/mergesamfiles/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/mergesamfiles/test.yml b/tests/modules/picard/mergesamfiles/test.yml index a331c96f..1cf59cb7 100644 --- a/tests/modules/picard/mergesamfiles/test.yml +++ b/tests/modules/picard/mergesamfiles/test.yml @@ -1,5 +1,5 @@ - name: picard mergesamfiles - command: nextflow run ./tests/modules/picard/mergesamfiles -entry test_picard_mergesamfiles -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/mergesamfiles -entry test_picard_mergesamfiles -c 
./tests/config/nextflow.config -c ./tests/modules/picard/mergesamfiles/nextflow.config tags: - picard - picard/mergesamfiles diff --git a/tests/modules/picard/sortsam/main.nf b/tests/modules/picard/sortsam/main.nf index 0130fad6..1516682c 100644 --- a/tests/modules/picard/sortsam/main.nf +++ b/tests/modules/picard/sortsam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] ) +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' workflow test_picard_sortsam { diff --git a/tests/modules/picard/sortsam/nextflow.config b/tests/modules/picard/sortsam/nextflow.config new file mode 100644 index 00000000..2c290cbe --- /dev/null +++ b/tests/modules/picard/sortsam/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_SORTSAM { + ext.suffix = '.sorted' + } + +} diff --git a/tests/modules/picard/sortsam/test.yml b/tests/modules/picard/sortsam/test.yml index 4443228e..61521850 100644 --- a/tests/modules/picard/sortsam/test.yml +++ b/tests/modules/picard/sortsam/test.yml @@ -1,5 +1,5 @@ - name: picard sortsam - command: nextflow run ./tests/modules/picard/sortsam -entry test_picard_sortsam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/sortsam -entry test_picard_sortsam -c ./tests/config/nextflow.config -c ./tests/modules/picard/sortsam/nextflow.config tags: - picard - picard/sortsam diff --git a/tests/modules/pirate/main.nf b/tests/modules/pirate/main.nf index 5957b1e6..05e5bdd8 100644 --- a/tests/modules/pirate/main.nf +++ b/tests/modules/pirate/main.nf @@ -2,15 +2,22 @@ nextflow.enable.dsl = 2 -include { PIRATE } from '../../../modules/pirate/main.nf' addParams( options: [:] ) +include { PIRATE } from '../../../modules/pirate/main.nf' workflow test_pirate { - - input = [ [ id:'test', single_end:false ], // meta map - [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test1_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test2_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test3_gff'], checkIfExists: true) + ] ] + // [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), + // file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), + // file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + // ] PIRATE ( input ) } diff --git a/tests/modules/pirate/nextflow.config b/tests/modules/pirate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pirate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pirate/test.yml 
b/tests/modules/pirate/test.yml index d8c4d0c4..b8d36b95 100644 --- a/tests/modules/pirate/test.yml +++ b/tests/modules/pirate/test.yml @@ -1,5 +1,5 @@ - name: pirate test_pirate - command: nextflow run tests/modules/pirate -entry test_pirate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pirate -entry test_pirate -c ./tests/config/nextflow.config -c ./tests/modules/pirate/nextflow.config tags: - pirate files: diff --git a/tests/modules/plasmidid/main.nf b/tests/modules/plasmidid/main.nf index 1dd57daf..52d25a91 100644 --- a/tests/modules/plasmidid/main.nf +++ b/tests/modules/plasmidid/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PLASMIDID } from '../../../modules/plasmidid/main.nf' addParams ( options: ['args' : '-k 0.8'] ) +include { PLASMIDID } from '../../../modules/plasmidid/main.nf' workflow test_plasmidid { diff --git a/tests/modules/plasmidid/nextflow.config b/tests/modules/plasmidid/nextflow.config new file mode 100644 index 00000000..2090bfae --- /dev/null +++ b/tests/modules/plasmidid/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLASMIDID { + ext.args = '-k 0.8' + } + +} diff --git a/tests/modules/plasmidid/test.yml b/tests/modules/plasmidid/test.yml index 838af394..cd0528cb 100644 --- a/tests/modules/plasmidid/test.yml +++ b/tests/modules/plasmidid/test.yml @@ -1,5 +1,5 @@ - name: plasmidid - command: nextflow run ./tests/modules/plasmidid -entry test_plasmidid -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plasmidid -entry test_plasmidid -c ./tests/config/nextflow.config -c ./tests/modules/plasmidid/nextflow.config tags: - plasmidid files: diff --git a/tests/modules/plink/extract/main.nf b/tests/modules/plink/extract/main.nf index e031a7b7..6beb0469 100644 --- a/tests/modules/plink/extract/main.nf +++ b/tests/modules/plink/extract/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' addParams ( options: [args:'--make-bed --set-missing-var-ids @:#:\\$1:\\$2']) -include { PLINK_EXTRACT } from '../../../../modules/plink/extract/main.nf' addParams( options: [suffix:'.extract'] ) +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' +include { PLINK_EXTRACT } from '../../../../modules/plink/extract/main.nf' workflow test_plink_extract { diff --git a/tests/modules/plink/extract/nextflow.config b/tests/modules/plink/extract/nextflow.config new file mode 100644 index 00000000..12668b01 --- /dev/null +++ b/tests/modules/plink/extract/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK_VCF { + ext.args = '--make-bed --set-missing-var-ids @:#:\\$1:\\$2' + } + + withName: PLINK_EXTRACT { + ext.suffix = '.extract' + } + +} diff --git a/tests/modules/plink/extract/test.yml b/tests/modules/plink/extract/test.yml index 40569d9d..87cf82cc 100644 --- a/tests/modules/plink/extract/test.yml +++ b/tests/modules/plink/extract/test.yml @@ -1,5 +1,5 @@ - name: plink extract test_plink_extract - command: nextflow run tests/modules/plink/extract -entry test_plink_extract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plink/extract -entry test_plink_extract -c ./tests/config/nextflow.config -c ./tests/modules/plink/extract/nextflow.config tags: - plink - plink/extract diff --git 
a/tests/modules/plink/vcf/main.nf b/tests/modules/plink/vcf/main.nf index 096bacdd..4dac8978 100644 --- a/tests/modules/plink/vcf/main.nf +++ b/tests/modules/plink/vcf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' addParams( options: ['args':" --make-bed --biallelic-only strict --vcf-half-call missing --double-id --recode ped --id-delim \'=\' --allow-extra-chr"]) +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' workflow test_plink_vcf { diff --git a/tests/modules/plink/vcf/nextflow.config b/tests/modules/plink/vcf/nextflow.config new file mode 100644 index 00000000..f0b72c8d --- /dev/null +++ b/tests/modules/plink/vcf/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK_VCF { + ext.args = ' --make-bed --biallelic-only strict --vcf-half-call missing --double-id --recode ped --id-delim \'=\' --allow-extra-chr' + } + +} diff --git a/tests/modules/plink/vcf/test.yml b/tests/modules/plink/vcf/test.yml index bfd54386..9042d14a 100644 --- a/tests/modules/plink/vcf/test.yml +++ b/tests/modules/plink/vcf/test.yml @@ -1,5 +1,5 @@ - name: plink vcf test_plink_vcf - command: nextflow run tests/modules/plink/vcf -entry test_plink_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plink/vcf -entry test_plink_vcf -c ./tests/config/nextflow.config -c ./tests/modules/plink/vcf/nextflow.config tags: - plink - plink/vcf diff --git a/tests/modules/plink2/vcf/main.nf b/tests/modules/plink2/vcf/main.nf index 409e7995..08d7dc61 100644 --- a/tests/modules/plink2/vcf/main.nf +++ b/tests/modules/plink2/vcf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf' addParams( options: [args:'--allow-extra-chr'] ) +include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf' workflow test_plink2_vcf { diff --git a/tests/modules/plink2/vcf/nextflow.config b/tests/modules/plink2/vcf/nextflow.config new file mode 100644 index 00000000..7f7e5e77 --- /dev/null +++ b/tests/modules/plink2/vcf/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK2_VCF { + ext.args = '--allow-extra-chr' + } + +} diff --git a/tests/modules/plink2/vcf/test.yml b/tests/modules/plink2/vcf/test.yml index 3f0cddc6..52f58a42 100644 --- a/tests/modules/plink2/vcf/test.yml +++ b/tests/modules/plink2/vcf/test.yml @@ -1,5 +1,5 @@ - name: plink2 vcf test_plink2_vcf - command: nextflow run tests/modules/plink2/vcf -entry test_plink2_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plink2/vcf -entry test_plink2_vcf -c ./tests/config/nextflow.config -c ./tests/modules/plink2/vcf/nextflow.config tags: - plink2/vcf - plink2 diff --git a/tests/modules/pmdtools/filter/main.nf b/tests/modules/pmdtools/filter/main.nf index c4832bbb..f1b2b4d3 100644 --- a/tests/modules/pmdtools/filter/main.nf +++ b/tests/modules/pmdtools/filter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PMDTOOLS_FILTER } from '../../../../modules/pmdtools/filter/main.nf' addParams( options: [:] ) +include { PMDTOOLS_FILTER } from '../../../../modules/pmdtools/filter/main.nf' workflow test_pmdtools_filter { diff --git a/tests/modules/pmdtools/filter/nextflow.config b/tests/modules/pmdtools/filter/nextflow.config new file mode 100644 index 
00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pmdtools/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pmdtools/filter/test.yml b/tests/modules/pmdtools/filter/test.yml index 9171b02e..a7ebefbe 100644 --- a/tests/modules/pmdtools/filter/test.yml +++ b/tests/modules/pmdtools/filter/test.yml @@ -1,5 +1,5 @@ - name: pmdtools filter - command: nextflow run ./tests/modules/pmdtools/filter -entry test_pmdtools_filter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pmdtools/filter -entry test_pmdtools_filter -c ./tests/config/nextflow.config -c ./tests/modules/pmdtools/filter/nextflow.config tags: - pmdtools - pmdtools/filter diff --git a/tests/modules/porechop/main.nf b/tests/modules/porechop/main.nf index b6d7bafa..f20b7a6e 100644 --- a/tests/modules/porechop/main.nf +++ b/tests/modules/porechop/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PORECHOP } from '../../../modules/porechop/main.nf' addParams( options: [args: '', suffix: '_porechop'] ) +include { PORECHOP } from '../../../modules/porechop/main.nf' workflow test_porechop { diff --git a/tests/modules/porechop/nextflow.config b/tests/modules/porechop/nextflow.config new file mode 100644 index 00000000..3a0536b0 --- /dev/null +++ b/tests/modules/porechop/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PORECHOP { + ext.args = '' + ext.suffix = '_porechop' + } + +} diff --git a/tests/modules/porechop/test.yml b/tests/modules/porechop/test.yml index b37a7ec4..8790ab87 100644 --- a/tests/modules/porechop/test.yml +++ b/tests/modules/porechop/test.yml @@ -1,5 +1,5 @@ - name: porechop test_porechop - command: nextflow run tests/modules/porechop -entry test_porechop -c tests/config/nextflow.config + command: nextflow run ./tests/modules/porechop -entry test_porechop -c ./tests/config/nextflow.config -c ./tests/modules/porechop/nextflow.config tags: - porechop files: diff --git a/tests/modules/preseq/lcextrap/main.nf b/tests/modules/preseq/lcextrap/main.nf index 390039bd..4bbbd146 100644 --- a/tests/modules/preseq/lcextrap/main.nf +++ b/tests/modules/preseq/lcextrap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PRESEQ_LCEXTRAP } from '../../../../modules/preseq/lcextrap/main.nf' addParams( options: [:] ) +include { PRESEQ_LCEXTRAP } from '../../../../modules/preseq/lcextrap/main.nf' // // Test with single-end data diff --git a/tests/modules/preseq/lcextrap/nextflow.config b/tests/modules/preseq/lcextrap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/preseq/lcextrap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/preseq/lcextrap/test.yml b/tests/modules/preseq/lcextrap/test.yml index 4472a485..ecd1d046 100644 --- a/tests/modules/preseq/lcextrap/test.yml +++ b/tests/modules/preseq/lcextrap/test.yml @@ -1,5 +1,5 @@ - name: preseq lcextrap single-end - command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_single_end -c ./tests/config/nextflow.config -c ./tests/modules/preseq/lcextrap/nextflow.config tags: - preseq - preseq/lcextrap @@ 
-9,7 +9,7 @@ - path: output/preseq/test.command.log - name: preseq lcextrap paired-end - command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/preseq/lcextrap/nextflow.config tags: - preseq - preseq/lcextrap diff --git a/tests/modules/prodigal/main.nf b/tests/modules/prodigal/main.nf index 414585a1..6e282015 100644 --- a/tests/modules/prodigal/main.nf +++ b/tests/modules/prodigal/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PRODIGAL } from '../../../modules/prodigal/main.nf' addParams( options: [:] ) +include { PRODIGAL } from '../../../modules/prodigal/main.nf' workflow test_prodigal { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/prodigal/nextflow.config b/tests/modules/prodigal/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/prodigal/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/prodigal/test.yml b/tests/modules/prodigal/test.yml index 93caa998..7f0ab88c 100644 --- a/tests/modules/prodigal/test.yml +++ b/tests/modules/prodigal/test.yml @@ -1,5 +1,5 @@ - name: prodigal test_prodigal - command: nextflow run tests/modules/prodigal -entry test_prodigal -c tests/config/nextflow.config + command: nextflow run ./tests/modules/prodigal -entry test_prodigal -c ./tests/config/nextflow.config -c ./tests/modules/prodigal/nextflow.config tags: - prodigal files: diff --git a/tests/modules/prokka/main.nf b/tests/modules/prokka/main.nf index e35cb1d9..97e94ca8 100644 --- a/tests/modules/prokka/main.nf +++ b/tests/modules/prokka/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PROKKA } from '../../../modules/prokka/main.nf' addParams( options: [:] ) +include { PROKKA } from '../../../modules/prokka/main.nf' workflow test_prokka { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/prokka/nextflow.config b/tests/modules/prokka/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/prokka/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/prokka/test.yml b/tests/modules/prokka/test.yml index 2823353c..92f813a7 100644 --- a/tests/modules/prokka/test.yml +++ b/tests/modules/prokka/test.yml @@ -1,5 +1,5 @@ - name: prokka - command: nextflow run ./tests/modules/prokka -entry test_prokka -c tests/config/nextflow.config + command: nextflow run ./tests/modules/prokka -entry test_prokka -c ./tests/config/nextflow.config -c ./tests/modules/prokka/nextflow.config tags: - prokka files: diff --git a/tests/modules/pycoqc/main.nf b/tests/modules/pycoqc/main.nf index ab65dadc..c8a8ee2c 100644 --- a/tests/modules/pycoqc/main.nf +++ b/tests/modules/pycoqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PYCOQC } from '../../../modules/pycoqc/main.nf' addParams ( options: ['args' : '--min_pass_qual 0'] ) +include { PYCOQC } from '../../../modules/pycoqc/main.nf' workflow test_pycoqc { diff --git a/tests/modules/pycoqc/nextflow.config b/tests/modules/pycoqc/nextflow.config new file mode 100644 index 00000000..d532f8f7 --- /dev/null +++ b/tests/modules/pycoqc/nextflow.config @@ -0,0 +1,9 @@ +process { 
+ + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PYCOQC { + ext.args = '--min_pass_qual 0' + } + +} diff --git a/tests/modules/pycoqc/test.yml b/tests/modules/pycoqc/test.yml index 052e3e1a..becd911b 100644 --- a/tests/modules/pycoqc/test.yml +++ b/tests/modules/pycoqc/test.yml @@ -1,5 +1,5 @@ - name: pycoqc - command: nextflow run ./tests/modules/pycoqc -entry test_pycoqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pycoqc -entry test_pycoqc -c ./tests/config/nextflow.config -c ./tests/modules/pycoqc/nextflow.config tags: - pycoqc files: diff --git a/tests/modules/pydamage/analyze/main.nf b/tests/modules/pydamage/analyze/main.nf index ddf0b27a..920a4201 100644 --- a/tests/modules/pydamage/analyze/main.nf +++ b/tests/modules/pydamage/analyze/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' addParams( options: [:] ) +include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' workflow test_pydamage { diff --git a/tests/modules/pydamage/analyze/nextflow.config b/tests/modules/pydamage/analyze/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pydamage/analyze/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pydamage/analyze/test.yml b/tests/modules/pydamage/analyze/test.yml index 157e947f..9d22f20e 100644 --- a/tests/modules/pydamage/analyze/test.yml +++ b/tests/modules/pydamage/analyze/test.yml @@ -1,5 +1,5 @@ - name: pydamage analyze test workflow - command: nextflow run tests/modules/pydamage/analyze -entry test_pydamage -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pydamage/analyze -entry test_pydamage -c ./tests/config/nextflow.config -c ./tests/modules/pydamage/analyze/nextflow.config tags: - pydamage - pydamage/analyze diff --git a/tests/modules/pydamage/filter/main.nf b/tests/modules/pydamage/filter/main.nf index 03e90408..dac03e78 100644 --- a/tests/modules/pydamage/filter/main.nf +++ b/tests/modules/pydamage/filter/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' addParams( options: [:] ) -include { PYDAMAGE_FILTER } from '../../../../modules/pydamage/filter/main.nf' addParams( options: [:] ) +include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' +include { PYDAMAGE_FILTER } from '../../../../modules/pydamage/filter/main.nf' workflow test_pydamage { diff --git a/tests/modules/pydamage/filter/nextflow.config b/tests/modules/pydamage/filter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pydamage/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pydamage/filter/test.yml b/tests/modules/pydamage/filter/test.yml index 248be44b..b6738e3d 100644 --- a/tests/modules/pydamage/filter/test.yml +++ b/tests/modules/pydamage/filter/test.yml @@ -1,5 +1,5 @@ - name: pydamage filter test workflow - command: nextflow run tests/modules/pydamage/filter -entry test_pydamage -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pydamage/filter -entry test_pydamage -c ./tests/config/nextflow.config -c 
./tests/modules/pydamage/filter/nextflow.config tags: - pydamage - pydamage/filter @@ -7,4 +7,4 @@ - path: output/pydamage/pydamage_results/pydamage_filtered_results.csv md5sum: 9f297233cf4932d7d7e52cc72d4727dc - path: output/pydamage/pydamage_results/pydamage_results.csv - md5sum: 6847e0d5aa6dba85bbd2dd509772b7a0 + md5sum: 37ee6b4dee6890fd2ec8550337f21ac9 diff --git a/tests/modules/qcat/main.nf b/tests/modules/qcat/main.nf index 72c87e37..8a5cdd6d 100644 --- a/tests/modules/qcat/main.nf +++ b/tests/modules/qcat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QCAT } from '../../../modules/qcat/main.nf' addParams( options: [:] ) +include { QCAT } from '../../../modules/qcat/main.nf' workflow test_qcat { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/qcat/nextflow.config b/tests/modules/qcat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/qcat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/qcat/test.yml b/tests/modules/qcat/test.yml index 5c43841b..47ece983 100644 --- a/tests/modules/qcat/test.yml +++ b/tests/modules/qcat/test.yml @@ -1,5 +1,5 @@ - name: qcat - command: nextflow run ./tests/modules/qcat -entry test_qcat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/qcat -entry test_qcat -c ./tests/config/nextflow.config -c ./tests/modules/qcat/nextflow.config tags: - qcat files: diff --git a/tests/modules/qualimap/bamqc/main.nf b/tests/modules/qualimap/bamqc/main.nf index 803d0220..a17efd59 100644 --- a/tests/modules/qualimap/bamqc/main.nf +++ b/tests/modules/qualimap/bamqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QUALIMAP_BAMQC } from '../../../../modules/qualimap/bamqc/main.nf' addParams( options: [:] ) +include { QUALIMAP_BAMQC } from '../../../../modules/qualimap/bamqc/main.nf' workflow test_qualimap_bamqc { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/qualimap/bamqc/nextflow.config b/tests/modules/qualimap/bamqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/qualimap/bamqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/qualimap/bamqc/test.yml b/tests/modules/qualimap/bamqc/test.yml index 7d746a51..41c4199e 100644 --- a/tests/modules/qualimap/bamqc/test.yml +++ b/tests/modules/qualimap/bamqc/test.yml @@ -1,5 +1,5 @@ - name: qualimap bamqc test workflow - command: nextflow run ./tests/modules/qualimap/bamqc -entry test_qualimap_bamqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/qualimap/bamqc -entry test_qualimap_bamqc -c ./tests/config/nextflow.config -c ./tests/modules/qualimap/bamqc/nextflow.config tags: - qualimap - qualimap/bamqc diff --git a/tests/modules/quast/main.nf b/tests/modules/quast/main.nf index d263470c..c879a8a9 100644 --- a/tests/modules/quast/main.nf +++ b/tests/modules/quast/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QUAST } from '../../../modules/quast/main.nf' addParams(options: [:]) +include { QUAST } from '../../../modules/quast/main.nf' workflow test_quast_ref { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/quast/nextflow.config b/tests/modules/quast/nextflow.config new file mode 100644 
index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/quast/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/quast/test.yml b/tests/modules/quast/test.yml index 6e1f991f..166cd896 100644 --- a/tests/modules/quast/test.yml +++ b/tests/modules/quast/test.yml @@ -1,5 +1,5 @@ - name: quast with reference - command: nextflow run ./tests/modules/quast -entry test_quast_ref -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/quast -entry test_quast_ref -c ./tests/config/nextflow.config -c ./tests/modules/quast/nextflow.config tags: - quast files: @@ -82,7 +82,7 @@ - path: ./output/quast/quast/icarus_viewers/contig_size_viewer.html - name: quast without reference - command: nextflow run ./tests/modules/quast -entry test_quast_noref -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/quast -entry test_quast_noref -c ./tests/config/nextflow.config -c ./tests/modules/quast/nextflow.config tags: - quast files: diff --git a/tests/modules/racon/main.nf b/tests/modules/racon/main.nf index b6b864e1..507d8d8d 100644 --- a/tests/modules/racon/main.nf +++ b/tests/modules/racon/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RACON } from '../../../modules/racon/main.nf' addParams( options: [:] ) +include { RACON } from '../../../modules/racon/main.nf' workflow test_racon { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/racon/nextflow.config b/tests/modules/racon/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/racon/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/racon/test.yml b/tests/modules/racon/test.yml index dc8e57dc..0250fa36 100644 --- a/tests/modules/racon/test.yml +++ b/tests/modules/racon/test.yml @@ -1,5 +1,5 @@ - name: racon test_racon - command: nextflow run tests/modules/racon -entry test_racon -c tests/config/nextflow.config + command: nextflow run ./tests/modules/racon -entry test_racon -c ./tests/config/nextflow.config -c ./tests/modules/racon/nextflow.config tags: - racon files: diff --git a/tests/modules/rapidnj/main.nf b/tests/modules/rapidnj/main.nf index e23fa46f..66d19c3c 100644 --- a/tests/modules/rapidnj/main.nf +++ b/tests/modules/rapidnj/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RAPIDNJ } from '../../../modules/rapidnj/main.nf' addParams( options: [:] ) +include { RAPIDNJ } from '../../../modules/rapidnj/main.nf' workflow test_rapidnj { diff --git a/tests/modules/rapidnj/nextflow.config b/tests/modules/rapidnj/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rapidnj/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rapidnj/test.yml b/tests/modules/rapidnj/test.yml index 0b7ecff5..21f6ead9 100644 --- a/tests/modules/rapidnj/test.yml +++ b/tests/modules/rapidnj/test.yml @@ -1,5 +1,5 @@ - name: rapidnj - command: nextflow run ./tests/modules/rapidnj -entry test_rapidnj -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rapidnj -entry test_rapidnj -c ./tests/config/nextflow.config -c ./tests/modules/rapidnj/nextflow.config tags: - rapidnj files: diff --git 
a/tests/modules/rasusa/main.nf b/tests/modules/rasusa/main.nf index 9cc139ad..8a11627c 100644 --- a/tests/modules/rasusa/main.nf +++ b/tests/modules/rasusa/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RASUSA } from '../../../modules/rasusa/main.nf' addParams( options: ['suffix':'_100X']) +include { RASUSA } from '../../../modules/rasusa/main.nf' workflow test_rasusa { input = [ [ id:'test', single_end:false], // meta map diff --git a/tests/modules/rasusa/nextflow.config b/tests/modules/rasusa/nextflow.config new file mode 100644 index 00000000..fea844ae --- /dev/null +++ b/tests/modules/rasusa/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RASUSA { + ext.suffix = '_100X' + } + +} diff --git a/tests/modules/rasusa/test.yml b/tests/modules/rasusa/test.yml index bb30c99e..41c56b67 100644 --- a/tests/modules/rasusa/test.yml +++ b/tests/modules/rasusa/test.yml @@ -1,5 +1,5 @@ - name: rasusa test_rasusa - command: nextflow run tests/modules/rasusa -entry test_rasusa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rasusa -entry test_rasusa -c ./tests/config/nextflow.config -c ./tests/modules/rasusa/nextflow.config tags: - rasusa files: diff --git a/tests/modules/raxmlng/main.nf b/tests/modules/raxmlng/main.nf index 2cac6b31..5fad6953 100644 --- a/tests/modules/raxmlng/main.nf +++ b/tests/modules/raxmlng/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { RAXMLNG as RAXMLNG_NO_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' addParams( options: [args:'--model GTR+G'] ) -include { RAXMLNG as RAXMLNG_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' addParams( options: [args:'--all --model GTR+G --bs-trees 1000'] ) +include { RAXMLNG as RAXMLNG_NO_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' +include { RAXMLNG as RAXMLNG_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' // // Test without bootstrapping diff --git a/tests/modules/raxmlng/nextflow.config b/tests/modules/raxmlng/nextflow.config new file mode 100644 index 00000000..8c269a9b --- /dev/null +++ b/tests/modules/raxmlng/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RAXMLNG_NO_BOOTSTRAP { + ext.args = '--model GTR+G' + } + + withName: RAXMLNG_BOOTSTRAP { + ext.args = '--all --model GTR+G --bs-trees 1000' + } + +} diff --git a/tests/modules/raxmlng/test.yml b/tests/modules/raxmlng/test.yml index 950c48ad..735b6a74 100644 --- a/tests/modules/raxmlng/test.yml +++ b/tests/modules/raxmlng/test.yml @@ -1,5 +1,5 @@ - name: raxmlng no_bootstrap - command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_no_bootstrap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_no_bootstrap -c ./tests/config/nextflow.config -c ./tests/modules/raxmlng/nextflow.config tags: - raxmlng files: @@ -11,7 +11,7 @@ - 'sample4:0.111' - name: raxmlng bootstrap - command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_bootstrap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_bootstrap -c ./tests/config/nextflow.config -c ./tests/modules/raxmlng/nextflow.config tags: - raxmlng files: diff --git a/tests/modules/rmarkdownnotebook/main.nf b/tests/modules/rmarkdownnotebook/main.nf index e56d54ff..fdb7d3b9 100644 --- a/tests/modules/rmarkdownnotebook/main.nf +++ 
b/tests/modules/rmarkdownnotebook/main.nf @@ -2,12 +2,8 @@ nextflow.enable.dsl = 2 -include { RMARKDOWNNOTEBOOK } from '../../../modules/rmarkdownnotebook/main.nf' addParams( - parametrize: false, options: [:] -) -include { RMARKDOWNNOTEBOOK as RMARKDOWNNOTEBOOK_PARAMETRIZE } from '../../../modules/rmarkdownnotebook/main.nf' addParams( - options: [:] -) +include { RMARKDOWNNOTEBOOK } from '../../../modules/rmarkdownnotebook/main.nf' +include { RMARKDOWNNOTEBOOK as RMARKDOWNNOTEBOOK_PARAMETRIZE } from '../../../modules/rmarkdownnotebook/main.nf' workflow test_rmarkdown { diff --git a/tests/modules/rmarkdownnotebook/nextflow.config b/tests/modules/rmarkdownnotebook/nextflow.config new file mode 100644 index 00000000..c99f5250 --- /dev/null +++ b/tests/modules/rmarkdownnotebook/nextflow.config @@ -0,0 +1,15 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RMARKDOWNNOTEBOOK { + ext = ['parametrize': false] + } + + // this should be the default options, but need to work around + // https://github.com/nextflow-io/nextflow/issues/2422 + withName: RMARKDOWNNOTEBOOK_PARAMETRIZE { + ext = ['parametrize': true] + } + +} diff --git a/tests/modules/rmarkdownnotebook/test.yml b/tests/modules/rmarkdownnotebook/test.yml index bef6086a..3645514a 100644 --- a/tests/modules/rmarkdownnotebook/test.yml +++ b/tests/modules/rmarkdownnotebook/test.yml @@ -1,5 +1,5 @@ - name: rmarkdownnotebook test_rmarkdown - command: nextflow run tests/modules/rmarkdownnotebook -entry test_rmarkdown -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rmarkdownnotebook -entry test_rmarkdown -c ./tests/config/nextflow.config -c ./tests/modules/rmarkdownnotebook/nextflow.config tags: - rmarkdownnotebook files: @@ -12,7 +12,7 @@ - "n_iter = 10" - name: rmarkdownnotebook test_rmarkdown_parametrize - command: nextflow run tests/modules/rmarkdownnotebook -entry test_rmarkdown_parametrize -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rmarkdownnotebook -entry test_rmarkdown_parametrize -c ./tests/config/nextflow.config -c ./tests/modules/rmarkdownnotebook/nextflow.config tags: - rmarkdownnotebook files: diff --git a/tests/modules/roary/main.nf b/tests/modules/roary/main.nf index a4a96d6e..3fae516c 100644 --- a/tests/modules/roary/main.nf +++ b/tests/modules/roary/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { ROARY } from '../../../modules/roary/main.nf' addParams( options: [:] ) +include { ROARY } from '../../../modules/roary/main.nf' workflow test_roary { - - input = [ [ id:'test', single_end:false ], // meta map - [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test1_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test2_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test3_gff'], checkIfExists: true) + ] ] ROARY ( input ) diff --git a/tests/modules/roary/nextflow.config b/tests/modules/roary/nextflow.config new file mode 100644 index 
00000000..8730f1c4 --- /dev/null +++ b/tests/modules/roary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/roary/test.yml b/tests/modules/roary/test.yml index c8e8c33d..981ab51c 100644 --- a/tests/modules/roary/test.yml +++ b/tests/modules/roary/test.yml @@ -1,5 +1,5 @@ - name: roary test_roary - command: nextflow run tests/modules/roary -entry test_roary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/roary -entry test_roary -c ./tests/config/nextflow.config -c ./tests/modules/roary/nextflow.config tags: - roary files: @@ -8,9 +8,9 @@ - path: output/roary/results/accessory.tab contains: ['FT'] - path: output/roary/results/accessory_binary_genes.fa - md5sum: 0baeea4947bf17a2bf29d43a44f0278f + md5sum: d4191cf748dd8016ad877857a034bef3 - path: output/roary/results/accessory_binary_genes.fa.newick - md5sum: b1f8c76ab231bd38b850c1f8d3c1584b + md5sum: d4a2a64e781263ca1b9b3a4bc9d3a6ea - path: output/roary/results/accessory_graph.dot contains: ['/* list of nodes */'] - path: output/roary/results/blast_identity_frequency.Rtab @@ -20,7 +20,7 @@ - path: output/roary/results/core_accessory.header.embl contains: ['ID Genome standard; DNA; PRO; 1234 BP.'] - path: output/roary/results/core_accessory.tab - contains: ['FT /taxa="GCF_000292685 GCF_000298385 GCF_002849995"'] + contains: ['FT /taxa="test1 test2 test3"'] - path: output/roary/results/core_accessory_graph.dot contains: ['/* list of nodes */'] - path: output/roary/results/gene_presence_absence.Rtab diff --git a/tests/modules/rsem/calculateexpression/main.nf b/tests/modules/rsem/calculateexpression/main.nf index e7de83a4..9d6d3c5c 100644 --- a/tests/modules/rsem/calculateexpression/main.nf +++ b/tests/modules/rsem/calculateexpression/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: [args: "--star"]) -include { RSEM_CALCULATEEXPRESSION } from '../../../../modules/rsem/calculateexpression/main.nf' addParams(options: [args: "--star --star-gzipped-read-file"]) +include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' +include { RSEM_CALCULATEEXPRESSION } from '../../../../modules/rsem/calculateexpression/main.nf' workflow test_rsem_calculateexpression { diff --git a/tests/modules/rsem/calculateexpression/nextflow.config b/tests/modules/rsem/calculateexpression/nextflow.config new file mode 100644 index 00000000..b17a1cf2 --- /dev/null +++ b/tests/modules/rsem/calculateexpression/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RSEM_PREPAREREFERENCE { + ext.args = '--star' + } + + withName: RSEM_CALCULATEEXPRESSION { + ext.args = '--star --star-gzipped-read-file' + } + +} diff --git a/tests/modules/rsem/calculateexpression/test.yml b/tests/modules/rsem/calculateexpression/test.yml index ac0866ea..f19c3398 100644 --- a/tests/modules/rsem/calculateexpression/test.yml +++ b/tests/modules/rsem/calculateexpression/test.yml @@ -1,55 +1,55 @@ - name: rsem calculateexpression test_rsem_calculateexpression - command: nextflow run tests/modules/rsem/calculateexpression -entry test_rsem_calculateexpression -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rsem/calculateexpression -entry test_rsem_calculateexpression -c 
./tests/config/nextflow.config -c ./tests/modules/rsem/calculateexpression/nextflow.config tags: - rsem - rsem/calculateexpression files: - - path: output/index/rsem/Genome + - path: output/rsem/rsem/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/rsem/Log.out - - path: output/index/rsem/SA + - path: output/rsem/rsem/Log.out + - path: output/rsem/rsem/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/rsem/SAindex + - path: output/rsem/rsem/SAindex md5sum: fd05c149960e72642a8d7c860528ae81 - - path: output/index/rsem/chrLength.txt + - path: output/rsem/rsem/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/rsem/chrName.txt + - path: output/rsem/rsem/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/rsem/chrNameLength.txt + - path: output/rsem/rsem/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/rsem/chrStart.txt + - path: output/rsem/rsem/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/rsem/exonGeTrInfo.tab + - path: output/rsem/rsem/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/rsem/exonInfo.tab + - path: output/rsem/rsem/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/rsem/geneInfo.tab + - path: output/rsem/rsem/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/rsem/genome.chrlist + - path: output/rsem/rsem/genome.chrlist md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/rsem/genome.fasta + - path: output/rsem/rsem/genome.fasta md5sum: f315020d899597c1b57e5fe9f60f4c3e - - path: output/index/rsem/genome.grp + - path: output/rsem/rsem/genome.grp md5sum: c2848a8b6d495956c11ec53efc1de67e - - path: output/index/rsem/genome.idx.fa + - path: output/rsem/rsem/genome.idx.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genome.n2g.idx.fa + - path: output/rsem/rsem/genome.n2g.idx.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genome.seq + - path: output/rsem/rsem/genome.seq md5sum: 94da0c6b88c33e63c9a052a11f4f57c1 - - path: output/index/rsem/genome.ti + - path: output/rsem/rsem/genome.ti md5sum: c9e4ae8d4d13a504eec2acf1b8589a66 - - path: output/index/rsem/genome.transcripts.fa + - path: output/rsem/rsem/genome.transcripts.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genomeParameters.txt + - path: output/rsem/rsem/genomeParameters.txt md5sum: 2fe3a030e1706c3e8cd4df3818e6dd2f - - path: output/index/rsem/sjdbInfo.txt + - path: output/rsem/rsem/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/rsem/sjdbList.fromGTF.out.tab + - path: output/rsem/rsem/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/rsem/sjdbList.out.tab + - path: output/rsem/rsem/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/rsem/transcriptInfo.tab + - path: output/rsem/rsem/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/rsem/test.genes.results md5sum: c7ec226f76736ea805771e73553ae359 diff --git a/tests/modules/rsem/preparereference/main.nf b/tests/modules/rsem/preparereference/main.nf index 2d4a9053..8062737d 100644 --- a/tests/modules/rsem/preparereference/main.nf +++ b/tests/modules/rsem/preparereference/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: 
[publish_dir:'rsem']) +include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' workflow test_rsem_preparereference { diff --git a/tests/modules/rsem/preparereference/nextflow.config b/tests/modules/rsem/preparereference/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rsem/preparereference/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rsem/preparereference/test.yml b/tests/modules/rsem/preparereference/test.yml index 734a92b2..1f058bea 100644 --- a/tests/modules/rsem/preparereference/test.yml +++ b/tests/modules/rsem/preparereference/test.yml @@ -1,5 +1,5 @@ - name: rsem preparereference test_rsem_preparereference - command: nextflow run tests/modules/rsem/preparereference -entry test_rsem_preparereference -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rsem/preparereference -entry test_rsem_preparereference -c ./tests/config/nextflow.config -c ./tests/modules/rsem/preparereference/nextflow.config tags: - rsem - rsem/preparereference diff --git a/tests/modules/rseqc/bamstat/main.nf b/tests/modules/rseqc/bamstat/main.nf index c13e7f97..4c53a1af 100644 --- a/tests/modules/rseqc/bamstat/main.nf +++ b/tests/modules/rseqc/bamstat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_BAMSTAT } from '../../../../modules/rseqc/bamstat/main.nf' addParams(options: [:]) +include { RSEQC_BAMSTAT } from '../../../../modules/rseqc/bamstat/main.nf' workflow test_rseqc_bamstat { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/rseqc/bamstat/nextflow.config b/tests/modules/rseqc/bamstat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/bamstat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/bamstat/test.yml b/tests/modules/rseqc/bamstat/test.yml index 75d62672..4cb35d0c 100644 --- a/tests/modules/rseqc/bamstat/test.yml +++ b/tests/modules/rseqc/bamstat/test.yml @@ -1,5 +1,5 @@ - name: rseqc bamstat test_rseqc_bamstat - command: nextflow run tests/modules/rseqc/bamstat -entry test_rseqc_bamstat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/bamstat -entry test_rseqc_bamstat -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/bamstat/nextflow.config tags: - rseqc - rseqc/bamstat diff --git a/tests/modules/rseqc/inferexperiment/main.nf b/tests/modules/rseqc/inferexperiment/main.nf index ae8c53a9..6337063d 100644 --- a/tests/modules/rseqc/inferexperiment/main.nf +++ b/tests/modules/rseqc/inferexperiment/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_INFEREXPERIMENT } from '../../../../modules/rseqc/inferexperiment/main.nf' addParams(options: [:]) +include { RSEQC_INFEREXPERIMENT } from '../../../../modules/rseqc/inferexperiment/main.nf' workflow test_rseqc_inferexperiment { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/rseqc/inferexperiment/nextflow.config b/tests/modules/rseqc/inferexperiment/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/inferexperiment/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/rseqc/inferexperiment/test.yml b/tests/modules/rseqc/inferexperiment/test.yml index 59d6f3d5..554f8317 100644 --- a/tests/modules/rseqc/inferexperiment/test.yml +++ b/tests/modules/rseqc/inferexperiment/test.yml @@ -1,5 +1,5 @@ - name: rseqc inferexperiment test_rseqc_inferexperiment - command: nextflow run tests/modules/rseqc/inferexperiment -entry test_rseqc_inferexperiment -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/inferexperiment -entry test_rseqc_inferexperiment -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/inferexperiment/nextflow.config tags: - rseqc - rseqc/inferexperiment diff --git a/tests/modules/rseqc/innerdistance/main.nf b/tests/modules/rseqc/innerdistance/main.nf index 003e8a14..8cc0ec3e 100644 --- a/tests/modules/rseqc/innerdistance/main.nf +++ b/tests/modules/rseqc/innerdistance/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_INNERDISTANCE } from '../../../../modules/rseqc/innerdistance/main.nf' addParams(options: [:]) +include { RSEQC_INNERDISTANCE } from '../../../../modules/rseqc/innerdistance/main.nf' workflow test_rseqc_innerdistance { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/innerdistance/nextflow.config b/tests/modules/rseqc/innerdistance/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/innerdistance/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/innerdistance/test.yml b/tests/modules/rseqc/innerdistance/test.yml index b0ee0283..c0f480e9 100644 --- a/tests/modules/rseqc/innerdistance/test.yml +++ b/tests/modules/rseqc/innerdistance/test.yml @@ -1,5 +1,5 @@ - name: rseqc innerdistance test_rseqc_innerdistance - command: nextflow run tests/modules/rseqc/innerdistance -entry test_rseqc_innerdistance -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/innerdistance -entry test_rseqc_innerdistance -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/innerdistance/nextflow.config tags: - rseqc - rseqc/innerdistance diff --git a/tests/modules/rseqc/junctionannotation/main.nf b/tests/modules/rseqc/junctionannotation/main.nf index a6913850..303dcd85 100644 --- a/tests/modules/rseqc/junctionannotation/main.nf +++ b/tests/modules/rseqc/junctionannotation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_JUNCTIONANNOTATION } from '../../../../modules/rseqc/junctionannotation/main.nf' addParams(options: [:]) +include { RSEQC_JUNCTIONANNOTATION } from '../../../../modules/rseqc/junctionannotation/main.nf' workflow test_rseqc_junctionannotation { input = [ diff --git a/tests/modules/rseqc/junctionannotation/nextflow.config b/tests/modules/rseqc/junctionannotation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/junctionannotation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/junctionannotation/test.yml b/tests/modules/rseqc/junctionannotation/test.yml index 39326f67..f2020b10 100644 --- a/tests/modules/rseqc/junctionannotation/test.yml +++ b/tests/modules/rseqc/junctionannotation/test.yml @@ -1,5 +1,5 @@ - name: rseqc junctionannotation test_rseqc_junctionannotation - command: nextflow run tests/modules/rseqc/junctionannotation 
-entry test_rseqc_junctionannotation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/junctionannotation -entry test_rseqc_junctionannotation -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/junctionannotation/nextflow.config tags: - rseqc - rseqc/junctionannotation diff --git a/tests/modules/rseqc/junctionsaturation/main.nf b/tests/modules/rseqc/junctionsaturation/main.nf index 047fb372..eefbb492 100644 --- a/tests/modules/rseqc/junctionsaturation/main.nf +++ b/tests/modules/rseqc/junctionsaturation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_JUNCTIONSATURATION } from '../../../../modules/rseqc/junctionsaturation/main.nf' addParams(options: [:]) +include { RSEQC_JUNCTIONSATURATION } from '../../../../modules/rseqc/junctionsaturation/main.nf' workflow test_rseqc_junctionsaturation { input = [ diff --git a/tests/modules/rseqc/junctionsaturation/nextflow.config b/tests/modules/rseqc/junctionsaturation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/junctionsaturation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/junctionsaturation/test.yml b/tests/modules/rseqc/junctionsaturation/test.yml index dfadb371..db977360 100644 --- a/tests/modules/rseqc/junctionsaturation/test.yml +++ b/tests/modules/rseqc/junctionsaturation/test.yml @@ -1,5 +1,5 @@ - name: rseqc junctionsaturation test_rseqc_junctionsaturation - command: nextflow run tests/modules/rseqc/junctionsaturation -entry test_rseqc_junctionsaturation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/junctionsaturation -entry test_rseqc_junctionsaturation -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/junctionsaturation/nextflow.config tags: - rseqc/junctionsaturation - rseqc diff --git a/tests/modules/rseqc/readdistribution/main.nf b/tests/modules/rseqc/readdistribution/main.nf index 415aed9a..180367f2 100644 --- a/tests/modules/rseqc/readdistribution/main.nf +++ b/tests/modules/rseqc/readdistribution/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_READDISTRIBUTION } from '../../../../modules/rseqc/readdistribution/main.nf' addParams(options: [:]) +include { RSEQC_READDISTRIBUTION } from '../../../../modules/rseqc/readdistribution/main.nf' workflow test_rseqc_readdistribution { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/readdistribution/nextflow.config b/tests/modules/rseqc/readdistribution/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/readdistribution/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/readdistribution/test.yml b/tests/modules/rseqc/readdistribution/test.yml index 79e7e1d3..e530e92a 100644 --- a/tests/modules/rseqc/readdistribution/test.yml +++ b/tests/modules/rseqc/readdistribution/test.yml @@ -1,5 +1,5 @@ - name: rseqc readdistribution test_rseqc_readdistribution - command: nextflow run tests/modules/rseqc/readdistribution -entry test_rseqc_readdistribution -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/readdistribution -entry test_rseqc_readdistribution -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/readdistribution/nextflow.config tags: - 
rseqc - rseqc/readdistribution diff --git a/tests/modules/rseqc/readduplication/main.nf b/tests/modules/rseqc/readduplication/main.nf index b94f6945..bcccde5d 100644 --- a/tests/modules/rseqc/readduplication/main.nf +++ b/tests/modules/rseqc/readduplication/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_READDUPLICATION } from '../../../../modules/rseqc/readduplication/main.nf' addParams(options: [:]) +include { RSEQC_READDUPLICATION } from '../../../../modules/rseqc/readduplication/main.nf' workflow test_rseqc_readduplication { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/readduplication/nextflow.config b/tests/modules/rseqc/readduplication/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/readduplication/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/readduplication/test.yml b/tests/modules/rseqc/readduplication/test.yml index 2a4c9546..b0c35071 100644 --- a/tests/modules/rseqc/readduplication/test.yml +++ b/tests/modules/rseqc/readduplication/test.yml @@ -1,5 +1,5 @@ - name: rseqc readduplication test_rseqc_readduplication - command: nextflow run tests/modules/rseqc/readduplication -entry test_rseqc_readduplication -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/readduplication -entry test_rseqc_readduplication -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/readduplication/nextflow.config tags: - rseqc/readduplication - rseqc diff --git a/tests/modules/salmon/index/main.nf b/tests/modules/salmon/index/main.nf index 98804733..680b4c6e 100644 --- a/tests/modules/salmon/index/main.nf +++ b/tests/modules/salmon/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' addParams( options: [publish_dir:'salmon'] ) +include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' workflow test_salmon_index { genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/salmon/index/nextflow.config b/tests/modules/salmon/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/salmon/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/salmon/index/test.yml b/tests/modules/salmon/index/test.yml index acefb044..07815e37 100644 --- a/tests/modules/salmon/index/test.yml +++ b/tests/modules/salmon/index/test.yml @@ -1,5 +1,5 @@ - name: salmon index - command: nextflow run ./tests/modules/salmon/index -entry test_salmon_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/index -entry test_salmon_index -c ./tests/config/nextflow.config -c ./tests/modules/salmon/index/nextflow.config tags: - salmon - salmon/index diff --git a/tests/modules/salmon/quant/main.nf b/tests/modules/salmon/quant/main.nf index ad15870c..a970f6c5 100644 --- a/tests/modules/salmon/quant/main.nf +++ b/tests/modules/salmon/quant/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' addParams( options: [:] ) -include { SALMON_QUANT } from '../../../../modules/salmon/quant/main.nf' addParams( options: [args: '--minAssignedFrags 
1'] ) +include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' +include { SALMON_QUANT } from '../../../../modules/salmon/quant/main.nf' workflow test_salmon_quant_single_end { - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ -21,10 +24,13 @@ workflow test_salmon_quant_single_end { workflow test_salmon_quant_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ -36,9 +42,12 @@ workflow test_salmon_quant_paired_end { workflow test_salmon_quant_single_end_lib_type_A { - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) diff --git a/tests/modules/salmon/quant/nextflow.config b/tests/modules/salmon/quant/nextflow.config new file mode 100644 index 00000000..7a8c911a --- /dev/null +++ b/tests/modules/salmon/quant/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SALMON_QUANT { + ext.args = '--minAssignedFrags 1' + } + +} diff --git a/tests/modules/salmon/quant/test.yml b/tests/modules/salmon/quant/test.yml index d7ed0d0f..514718fa 100644 --- a/tests/modules/salmon/quant/test.yml +++ b/tests/modules/salmon/quant/test.yml @@ -1,5 +1,5 @@ - name: salmon quant single-end - command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon - salmon/quant @@ -23,34 +23,34 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: ./output/salmon/test/libParams/flenDist.txt md5sum: 2de170bdc9f6fd237d286429b292bb28 - - path: 
./output/index/salmon/ref_indexing.log - - path: ./output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: ./output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: ./output/index/salmon/versionInfo.json + - path: ./output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: ./output/index/salmon/complete_ref_lens.bin + - path: ./output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/mphf.bin + - path: ./output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: ./output/index/salmon/pre_indexing.log - - path: ./output/index/salmon/ctable.bin - - path: ./output/index/salmon/duplicate_clusters.tsv + - path: ./output/salmon/salmon/pre_indexing.log + - path: ./output/salmon/salmon/ctable.bin + - path: ./output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: ./output/index/salmon/reflengths.bin + - path: ./output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/info.json + - path: ./output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: ./output/index/salmon/refAccumLengths.bin + - path: ./output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: ./output/index/salmon/ctg_offsets.bin + - path: ./output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: ./output/index/salmon/rank.bin + - path: ./output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin - name: salmon quant paired end - command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon - salmon/quant @@ -74,35 +74,35 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: ./output/salmon/test/libParams/flenDist.txt md5sum: 221f754ed55dd1e34874f9b7b3f9d240 - - path: ./output/index/salmon/ref_indexing.log - - path: ./output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: ./output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: ./output/index/salmon/versionInfo.json + - path: ./output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: ./output/index/salmon/complete_ref_lens.bin + - path: ./output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/mphf.bin + - path: ./output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: ./output/index/salmon/pre_indexing.log - - path: ./output/index/salmon/ctable.bin - - path: ./output/index/salmon/duplicate_clusters.tsv + - path: ./output/salmon/salmon/pre_indexing.log + - path: ./output/salmon/salmon/ctable.bin + - path: ./output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: ./output/index/salmon/reflengths.bin + - path: ./output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/info.json + - path: ./output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - 
path: ./output/index/salmon/refAccumLengths.bin + - path: ./output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: ./output/index/salmon/ctg_offsets.bin + - path: ./output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: ./output/index/salmon/rank.bin + - path: ./output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin - name: salmon quant test_salmon_quant_single_end_lib_type_A - command: nextflow run tests/modules/salmon/quant -entry test_salmon_quant_single_end_lib_type_A -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end_lib_type_A -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon/quant - salmon @@ -126,26 +126,26 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: output/salmon/test/libParams/flenDist.txt md5sum: 2de170bdc9f6fd237d286429b292bb28 - - path: ./output/index/salmon/ref_indexing.log - - path: output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: output/index/salmon/versionInfo.json + - path: output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: output/index/salmon/complete_ref_lens.bin + - path: output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: output/index/salmon/mphf.bin + - path: output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: output/index/salmon/duplicate_clusters.tsv + - path: output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: output/index/salmon/reflengths.bin + - path: output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: output/index/salmon/info.json + - path: output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: output/index/salmon/refAccumLengths.bin + - path: output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: output/index/salmon/ctg_offsets.bin + - path: output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: output/index/salmon/rank.bin + - path: output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin diff --git a/tests/modules/samblaster/main.nf b/tests/modules/samblaster/main.nf index 5983d130..5831ecfc 100644 --- a/tests/modules/samblaster/main.nf +++ b/tests/modules/samblaster/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMBLASTER } from '../../../modules/samblaster/main.nf' addParams( options: [args: "-M --addMateTags", suffix:'.processed'] ) +include { SAMBLASTER } from '../../../modules/samblaster/main.nf' workflow test_samblaster { diff --git a/tests/modules/samblaster/nextflow.config b/tests/modules/samblaster/nextflow.config new file mode 100644 index 00000000..3018088b --- /dev/null +++ b/tests/modules/samblaster/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMBLASTER { + ext.args = '-M 
--addMateTags' + ext.suffix = '.processed' + } + +} diff --git a/tests/modules/samblaster/test.yml b/tests/modules/samblaster/test.yml index d56d4330..acc6d0f0 100644 --- a/tests/modules/samblaster/test.yml +++ b/tests/modules/samblaster/test.yml @@ -1,5 +1,5 @@ - name: samblaster test_samblaster - command: nextflow run tests/modules/samblaster -entry test_samblaster -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samblaster -entry test_samblaster -c ./tests/config/nextflow.config -c ./tests/modules/samblaster/nextflow.config tags: - samblaster files: diff --git a/tests/modules/samtools/ampliconclip/main.nf b/tests/modules/samtools/ampliconclip/main.nf index a8d8609f..eae70b06 100644 --- a/tests/modules/samtools/ampliconclip/main.nf +++ b/tests/modules/samtools/ampliconclip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_AMPLICONCLIP } from '../../../../modules/samtools/ampliconclip/main.nf' addParams([:]) +include { SAMTOOLS_AMPLICONCLIP } from '../../../../modules/samtools/ampliconclip/main.nf' workflow test_samtools_ampliconclip_no_stats_no_rejects { diff --git a/tests/modules/samtools/ampliconclip/nextflow.config b/tests/modules/samtools/ampliconclip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/ampliconclip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/ampliconclip/test.yml b/tests/modules/samtools/ampliconclip/test.yml index 9e8e1f9f..e8fd456c 100644 --- a/tests/modules/samtools/ampliconclip/test.yml +++ b/tests/modules/samtools/ampliconclip/test.yml @@ -1,7 +1,5 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml samtools/ampliconclip - name: samtools ampliconclip no stats no rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_no_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_no_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip @@ -10,7 +8,7 @@ md5sum: 678f9ab04fbe3206f0f96e170fd833e9 - name: samtools ampliconclip no stats with rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip @@ -21,7 +19,7 @@ md5sum: a0bee15aead020d16d0c81bd9667df46 - name: samtools ampliconclip with stats with rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip diff --git a/tests/modules/samtools/bam2fq/main.nf b/tests/modules/samtools/bam2fq/main.nf index f8614ad0..928bfe08 100644 --- a/tests/modules/samtools/bam2fq/main.nf +++ b/tests/modules/samtools/bam2fq/main.nf 
@@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_BAM2FQ } from '../../../../modules/samtools/bam2fq/main.nf' addParams( options: [args: "-T RX"] ) +include { SAMTOOLS_BAM2FQ } from '../../../../modules/samtools/bam2fq/main.nf' workflow test_samtools_bam2fq_nosplit { diff --git a/tests/modules/samtools/bam2fq/nextflow.config b/tests/modules/samtools/bam2fq/nextflow.config new file mode 100644 index 00000000..cf886bb2 --- /dev/null +++ b/tests/modules/samtools/bam2fq/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + +} diff --git a/tests/modules/samtools/bam2fq/test.yml b/tests/modules/samtools/bam2fq/test.yml index ff1762b3..feb994fd 100644 --- a/tests/modules/samtools/bam2fq/test.yml +++ b/tests/modules/samtools/bam2fq/test.yml @@ -1,5 +1,5 @@ - name: samtools bam2fq test_samtools_bam2fq_nosplit - command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_nosplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_nosplit -c ./tests/config/nextflow.config -c ./tests/modules/samtools/bam2fq/nextflow.config tags: - samtools/bam2fq - samtools @@ -8,7 +8,7 @@ md5sum: d733e66d29a4b366bf9df8c42f845256 - name: samtools bam2fq test_samtools_bam2fq_withsplit - command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_withsplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_withsplit -c ./tests/config/nextflow.config -c ./tests/modules/samtools/bam2fq/nextflow.config tags: - samtools/bam2fq - samtools diff --git a/tests/modules/samtools/depth/main.nf b/tests/modules/samtools/depth/main.nf index 90497534..c6d2dc0e 100644 --- a/tests/modules/samtools/depth/main.nf +++ b/tests/modules/samtools/depth/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_DEPTH } from '../../../../modules/samtools/depth/main.nf' addParams( options: [:] ) +include { SAMTOOLS_DEPTH } from '../../../../modules/samtools/depth/main.nf' workflow test_samtools_depth { diff --git a/tests/modules/samtools/depth/nextflow.config b/tests/modules/samtools/depth/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/depth/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/depth/test.yml b/tests/modules/samtools/depth/test.yml index 4d5007c8..978134ad 100644 --- a/tests/modules/samtools/depth/test.yml +++ b/tests/modules/samtools/depth/test.yml @@ -1,5 +1,5 @@ - name: samtools depth - command: nextflow run tests/modules/samtools/depth -entry test_samtools_depth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/depth -entry test_samtools_depth -c ./tests/config/nextflow.config -c ./tests/modules/samtools/depth/nextflow.config tags: - samtools/depth - samtools diff --git a/tests/modules/samtools/faidx/main.nf b/tests/modules/samtools/faidx/main.nf index 0102af28..bc47c847 100644 --- a/tests/modules/samtools/faidx/main.nf +++ b/tests/modules/samtools/faidx/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FAIDX } from '../../../../modules/samtools/faidx/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FAIDX } from 
'../../../../modules/samtools/faidx/main.nf' workflow test_samtools_faidx { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/samtools/faidx/nextflow.config b/tests/modules/samtools/faidx/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/faidx/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/faidx/test.yml b/tests/modules/samtools/faidx/test.yml index 49a92265..f0224f34 100644 --- a/tests/modules/samtools/faidx/test.yml +++ b/tests/modules/samtools/faidx/test.yml @@ -1,5 +1,5 @@ - name: samtools faidx test workflow - command: nextflow run tests/modules/samtools/faidx -entry test_samtools_faidx -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/faidx -entry test_samtools_faidx -c ./tests/config/nextflow.config -c ./tests/modules/samtools/faidx/nextflow.config tags: - samtools - samtools/faidx diff --git a/tests/modules/samtools/fastq/main.nf b/tests/modules/samtools/fastq/main.nf index 94ad9471..6e7e323c 100644 --- a/tests/modules/samtools/fastq/main.nf +++ b/tests/modules/samtools/fastq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FASTQ } from '../../../../modules/samtools/fastq/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FASTQ } from '../../../../modules/samtools/fastq/main.nf' workflow test_samtools_fastq { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/fastq/nextflow.config b/tests/modules/samtools/fastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/fastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/fastq/test.yml b/tests/modules/samtools/fastq/test.yml index bfcf5c92..39da9889 100644 --- a/tests/modules/samtools/fastq/test.yml +++ b/tests/modules/samtools/fastq/test.yml @@ -1,5 +1,5 @@ - name: samtools fastq test_samtools_fastq - command: nextflow run tests/modules/samtools/fastq -entry test_samtools_fastq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/fastq -entry test_samtools_fastq -c ./tests/config/nextflow.config -c ./tests/modules/samtools/fastq/nextflow.config tags: - samtools - samtools/fastq diff --git a/tests/modules/samtools/fixmate/main.nf b/tests/modules/samtools/fixmate/main.nf index 5174beab..cb7c136d 100644 --- a/tests/modules/samtools/fixmate/main.nf +++ b/tests/modules/samtools/fixmate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FIXMATE } from '../../../../modules/samtools/fixmate/main.nf' addParams( options: [args:'-r -c -m'] ) +include { SAMTOOLS_FIXMATE } from '../../../../modules/samtools/fixmate/main.nf' workflow test_samtools_fixmate { diff --git a/tests/modules/samtools/fixmate/nextflow.config b/tests/modules/samtools/fixmate/nextflow.config new file mode 100644 index 00000000..b9402bcf --- /dev/null +++ b/tests/modules/samtools/fixmate/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_FIXMATE { + ext.args = '-r -c -m' + } + +} diff --git a/tests/modules/samtools/fixmate/test.yml b/tests/modules/samtools/fixmate/test.yml 
index 0b3aa2a9..8e87e059 100644 --- a/tests/modules/samtools/fixmate/test.yml +++ b/tests/modules/samtools/fixmate/test.yml @@ -1,5 +1,5 @@ - name: samtools fixmate test_samtools_fixmate - command: nextflow run tests/modules/samtools/fixmate -entry test_samtools_fixmate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/fixmate -entry test_samtools_fixmate -c ./tests/config/nextflow.config -c ./tests/modules/samtools/fixmate/nextflow.config tags: - samtools - samtools/fixmate diff --git a/tests/modules/samtools/flagstat/main.nf b/tests/modules/samtools/flagstat/main.nf index a31a7d22..a0e86422 100644 --- a/tests/modules/samtools/flagstat/main.nf +++ b/tests/modules/samtools/flagstat/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FLAGSTAT } from '../../../../modules/samtools/flagstat/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FLAGSTAT } from '../../../../modules/samtools/flagstat/main.nf' workflow test_samtools_flagstat { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) + ] SAMTOOLS_FLAGSTAT ( input ) } diff --git a/tests/modules/samtools/flagstat/nextflow.config b/tests/modules/samtools/flagstat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/flagstat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/flagstat/test.yml b/tests/modules/samtools/flagstat/test.yml index 0da6c2f4..a5f28b36 100644 --- a/tests/modules/samtools/flagstat/test.yml +++ b/tests/modules/samtools/flagstat/test.yml @@ -1,5 +1,5 @@ - name: samtools flagstat - command: nextflow run ./tests/modules/samtools/flagstat -entry test_samtools_flagstat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/flagstat -entry test_samtools_flagstat -c ./tests/config/nextflow.config -c ./tests/modules/samtools/flagstat/nextflow.config tags: - samtools - samtools/flagstat diff --git a/tests/modules/samtools/idxstats/main.nf b/tests/modules/samtools/idxstats/main.nf index 9919c3e4..f3de76a0 100644 --- a/tests/modules/samtools/idxstats/main.nf +++ b/tests/modules/samtools/idxstats/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_IDXSTATS } from '../../../../modules/samtools/idxstats/main.nf' addParams( options: [:] ) +include { SAMTOOLS_IDXSTATS } from '../../../../modules/samtools/idxstats/main.nf' workflow test_samtools_idxstats { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/idxstats/nextflow.config b/tests/modules/samtools/idxstats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/idxstats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/idxstats/test.yml b/tests/modules/samtools/idxstats/test.yml index 6064ca56..88786eef 
100644 --- a/tests/modules/samtools/idxstats/test.yml +++ b/tests/modules/samtools/idxstats/test.yml @@ -1,5 +1,5 @@ - name: samtools idxstats - command: nextflow run ./tests/modules/samtools/idxstats -entry test_samtools_idxstats -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/idxstats -entry test_samtools_idxstats -c ./tests/config/nextflow.config -c ./tests/modules/samtools/idxstats/nextflow.config tags: - samtools - samtools/idxstats diff --git a/tests/modules/samtools/index/main.nf b/tests/modules/samtools/index/main.nf index 737936fb..3592a99a 100644 --- a/tests/modules/samtools/index/main.nf +++ b/tests/modules/samtools/index/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CRAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' addParams( options: [args:'-c'] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CRAI } from '../../../../modules/samtools/index/main.nf' +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' workflow test_samtools_index_bai { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/index/nextflow.config b/tests/modules/samtools/index/nextflow.config new file mode 100644 index 00000000..d3a4c785 --- /dev/null +++ b/tests/modules/samtools/index/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_INDEX_CSI { + ext.args = '-c' + } + +} diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 279b99d8..6972ed65 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -1,5 +1,5 @@ - name: samtools index test_samtools_index_bai - command: nextflow run tests/modules/samtools/index -entry test_samtools_index_bai -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_bai -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config tags: - samtools - samtools/index @@ -8,7 +8,7 @@ md5sum: 704c10dd1326482448ca3073fdebc2f4 - name: samtools index test_samtools_index_crai - command: nextflow run tests/modules/samtools/index -entry test_samtools_index_crai -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_crai -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config tags: - samtools - samtools/index @@ -17,7 +17,7 @@ md5sum: 537e3d8c937bcc4e34e1cf47cd71d484 - name: samtools index test_samtools_index_csi - command: nextflow run tests/modules/samtools/index -entry test_samtools_index_csi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_csi -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config tags: - samtools - samtools/index diff --git a/tests/modules/samtools/merge/main.nf b/tests/modules/samtools/merge/main.nf index 07485df1..ad5c56e3 100644 --- a/tests/modules/samtools/merge/main.nf +++ 
b/tests/modules/samtools/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_MERGE } from '../../../../modules/samtools/merge/main.nf' addParams( options: [suffix:'_merged'] ) +include { SAMTOOLS_MERGE } from '../../../../modules/samtools/merge/main.nf' workflow test_samtools_merge { input = [ [ id: 'test' ], // meta map diff --git a/tests/modules/samtools/merge/nextflow.config b/tests/modules/samtools/merge/nextflow.config new file mode 100644 index 00000000..cb350bf7 --- /dev/null +++ b/tests/modules/samtools/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_MERGE { + ext.suffix = '_merged' + } + +} diff --git a/tests/modules/samtools/merge/test.yml b/tests/modules/samtools/merge/test.yml index f04aa74b..948c6191 100644 --- a/tests/modules/samtools/merge/test.yml +++ b/tests/modules/samtools/merge/test.yml @@ -1,5 +1,5 @@ - name: samtools merge test_samtools_merge - command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/merge -entry test_samtools_merge -c ./tests/config/nextflow.config -c ./tests/modules/samtools/merge/nextflow.config tags: - samtools - samtools/merge @@ -7,7 +7,7 @@ - path: output/samtools/test_merged.bam - name: samtools merge test_samtools_merge_cram - command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/merge -entry test_samtools_merge_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/merge/nextflow.config tags: - samtools - samtools/merge diff --git a/tests/modules/samtools/mpileup/main.nf b/tests/modules/samtools/mpileup/main.nf index b8db0275..dc58cc2c 100644 --- a/tests/modules/samtools/mpileup/main.nf +++ b/tests/modules/samtools/mpileup/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_MPILEUP } from '../../../../modules/samtools/mpileup/main.nf' addParams( options: [:] ) +include { SAMTOOLS_MPILEUP } from '../../../../modules/samtools/mpileup/main.nf' workflow test_samtools_mpileup { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/mpileup/nextflow.config b/tests/modules/samtools/mpileup/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/mpileup/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/mpileup/test.yml b/tests/modules/samtools/mpileup/test.yml index 25c39d63..53a9c142 100644 --- a/tests/modules/samtools/mpileup/test.yml +++ b/tests/modules/samtools/mpileup/test.yml @@ -1,5 +1,5 @@ - name: samtools mpileup - command: nextflow run ./tests/modules/samtools/mpileup -entry test_samtools_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/mpileup -entry test_samtools_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/samtools/mpileup/nextflow.config tags: - samtools - samtools/mpileup diff --git a/tests/modules/samtools/sort/main.nf b/tests/modules/samtools/sort/main.nf index b76cdb1a..9853b355 100644 --- a/tests/modules/samtools/sort/main.nf +++ b/tests/modules/samtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_SORT } from 
'../../../../modules/samtools/sort/main.nf' addParams( options: ['suffix': '.sorted'] ) +include { SAMTOOLS_SORT } from '../../../../modules/samtools/sort/main.nf' workflow test_samtools_sort { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/sort/nextflow.config b/tests/modules/samtools/sort/nextflow.config new file mode 100644 index 00000000..57ae6280 --- /dev/null +++ b/tests/modules/samtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_SORT { + ext.suffix = '.sorted' + } + +} diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index 785ec03b..dfd2eb69 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -1,5 +1,5 @@ - name: samtools sort - command: nextflow run tests/modules/samtools/sort -entry test_samtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/sort -entry test_samtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/samtools/sort/nextflow.config tags: - samtools - samtools/sort diff --git a/tests/modules/samtools/stats/main.nf b/tests/modules/samtools/stats/main.nf index 4e92b366..d83cbf4a 100644 --- a/tests/modules/samtools/stats/main.nf +++ b/tests/modules/samtools/stats/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_STATS } from '../../../../modules/samtools/stats/main.nf' addParams( options: [:] ) +include { SAMTOOLS_STATS } from '../../../../modules/samtools/stats/main.nf' workflow test_samtools_stats { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/stats/nextflow.config b/tests/modules/samtools/stats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/stats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index c186665a..178eba72 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -1,5 +1,5 @@ - name: samtools stats test_samtools_stats - command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats -c ./tests/config/nextflow.config -c ./tests/modules/samtools/stats/nextflow.config tags: - samtools/stats - samtools @@ -8,7 +8,7 @@ md5sum: 09146eeecfcae2a84fb8615c86cd8d64 - name: samtools stats test_samtools_stats_cram - command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/stats/nextflow.config tags: - samtools/stats - samtools diff --git a/tests/modules/samtools/view/main.nf b/tests/modules/samtools/view/main.nf index bd270cd8..8ee27ef8 100644 --- a/tests/modules/samtools/view/main.nf +++ b/tests/modules/samtools/view/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_VIEW } from '../../../../modules/samtools/view/main.nf' addParams( options: [:] ) +include { SAMTOOLS_VIEW } from '../../../../modules/samtools/view/main.nf' workflow test_samtools_view { input = [ [ 
id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/view/nextflow.config b/tests/modules/samtools/view/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/view/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/view/test.yml b/tests/modules/samtools/view/test.yml index ceaa0e89..1287d455 100644 --- a/tests/modules/samtools/view/test.yml +++ b/tests/modules/samtools/view/test.yml @@ -1,5 +1,5 @@ - name: samtools view test_samtools_view - command: nextflow run tests/modules/samtools/view -entry test_samtools_view -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/view -entry test_samtools_view -c ./tests/config/nextflow.config -c ./tests/modules/samtools/view/nextflow.config tags: - samtools/view - samtools @@ -8,7 +8,7 @@ md5sum: 8fb1e82f76416e9e30fc6b2357e2cf13 - name: samtools view test_samtools_view_cram - command: nextflow run tests/modules/samtools/view -entry test_samtools_view_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/view -entry test_samtools_view_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/view/nextflow.config tags: - samtools/view - samtools diff --git a/tests/modules/scoary/main.nf b/tests/modules/scoary/main.nf index ec3f6e9f..5f080b7d 100644 --- a/tests/modules/scoary/main.nf +++ b/tests/modules/scoary/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SCOARY } from '../../../modules/scoary/main.nf' addParams( options: [:] ) +include { SCOARY } from '../../../modules/scoary/main.nf' workflow test_scoary { diff --git a/tests/modules/scoary/nextflow.config b/tests/modules/scoary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/scoary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/scoary/test.yml b/tests/modules/scoary/test.yml index c5269293..71344093 100644 --- a/tests/modules/scoary/test.yml +++ b/tests/modules/scoary/test.yml @@ -1,5 +1,5 @@ - name: scoary test_scoary - command: nextflow run tests/modules/scoary -entry test_scoary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/scoary -entry test_scoary -c ./tests/config/nextflow.config -c ./tests/modules/scoary/nextflow.config tags: - scoary files: diff --git a/tests/modules/seacr/callpeak/main.nf b/tests/modules/seacr/callpeak/main.nf index a1aeb76e..230d3a4c 100644 --- a/tests/modules/seacr/callpeak/main.nf +++ b/tests/modules/seacr/callpeak/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEACR_CALLPEAK } from '../../../../modules/seacr/callpeak/main.nf' addParams( options: [ args:'norm stringent' ] ) +include { SEACR_CALLPEAK } from '../../../../modules/seacr/callpeak/main.nf' workflow test_seacr_callpeak { input = [ [ id:'test_1'], diff --git a/tests/modules/seacr/callpeak/nextflow.config b/tests/modules/seacr/callpeak/nextflow.config new file mode 100644 index 00000000..54c19e6b --- /dev/null +++ b/tests/modules/seacr/callpeak/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEACR_CALLPEAK { + ext.args = 'norm stringent' + } + +} diff --git a/tests/modules/seacr/callpeak/test.yml 
b/tests/modules/seacr/callpeak/test.yml index 2cf75b06..63104bd0 100644 --- a/tests/modules/seacr/callpeak/test.yml +++ b/tests/modules/seacr/callpeak/test.yml @@ -1,5 +1,5 @@ - name: seacr callpeak - command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak -c ./tests/config/nextflow.config -c ./tests/modules/seacr/callpeak/nextflow.config tags: - seacr - seacr/callpeak @@ -8,7 +8,7 @@ md5sum: a3cb0c7c4ffa895788da3f0d6371b7df - name: seacr callpeak threshold - command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak_threshold -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak_threshold -c ./tests/config/nextflow.config -c ./tests/modules/seacr/callpeak/nextflow.config tags: - seacr - seacr/callpeak diff --git a/tests/modules/seqkit/split2/main.nf b/tests/modules/seqkit/split2/main.nf index 21626cac..acb9d41b 100644 --- a/tests/modules/seqkit/split2/main.nf +++ b/tests/modules/seqkit/split2/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_LENGTH } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-length 8K'] ) -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_SIZE } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-size 50' ] ) -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_PART } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-part 3'] ) +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_LENGTH } from '../../../../modules/seqkit/split2/main.nf' +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_SIZE } from '../../../../modules/seqkit/split2/main.nf' +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_PART } from '../../../../modules/seqkit/split2/main.nf' workflow test_seqkit_split2_single_end_length { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/seqkit/split2/nextflow.config b/tests/modules/seqkit/split2/nextflow.config new file mode 100644 index 00000000..e4f64931 --- /dev/null +++ b/tests/modules/seqkit/split2/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQKIT_SPLIT2_LENGTH { + ext.args = '--by-length 8K' + } + + withName: SEQKIT_SPLIT2_SIZE { + ext.args = '--by-size 50' + } + + withName: SEQKIT_SPLIT2_PART { + ext.args = '--by-part 3' + } + +} diff --git a/tests/modules/seqkit/split2/test.yml b/tests/modules/seqkit/split2/test.yml index 13f3b003..12b02072 100644 --- a/tests/modules/seqkit/split2/test.yml +++ b/tests/modules/seqkit/split2/test.yml @@ -1,5 +1,5 @@ - name: seqkit split2 single-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -10,7 +10,7 @@ md5sum: cf38c51506e45380fe25abdd1bd5ccc6 - name: seqkit split2 single-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c ./tests/config/nextflow.config -c 
./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -21,7 +21,7 @@ md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 - name: seqkit split2 single-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -34,7 +34,7 @@ md5sum: 8bc86ba83a611c54f592f4eae19b680f - name: seqkit split2 paired-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -49,7 +49,7 @@ md5sum: 927097c6ac7522199a9e016333181a8e - name: seqkit split2 paired-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -64,7 +64,7 @@ md5sum: 8796c3f327b1094244bfcdb36d536526 - name: seqkit split2 paired-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 diff --git a/tests/modules/seqsero2/main.nf b/tests/modules/seqsero2/main.nf index 04ee8e27..9587bf9f 100644 --- a/tests/modules/seqsero2/main.nf +++ b/tests/modules/seqsero2/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQSERO2 } from '../../../modules/seqsero2/main.nf' addParams( options: [args: '-m k -t 4'] ) +include { SEQSERO2 } from '../../../modules/seqsero2/main.nf' workflow test_seqsero2 { diff --git a/tests/modules/seqsero2/nextflow.config b/tests/modules/seqsero2/nextflow.config new file mode 100644 index 00000000..b46fa7e2 --- /dev/null +++ b/tests/modules/seqsero2/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQSERO2 { + ext.args = '-m k -t 4' + } + +} diff --git a/tests/modules/seqsero2/test.yml b/tests/modules/seqsero2/test.yml index 2aa49686..e2dec062 100644 --- a/tests/modules/seqsero2/test.yml +++ b/tests/modules/seqsero2/test.yml @@ -1,5 +1,5 @@ - name: seqsero2 test_seqsero2 - command: nextflow run tests/modules/seqsero2 -entry test_seqsero2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqsero2 -entry test_seqsero2 -c ./tests/config/nextflow.config -c ./tests/modules/seqsero2/nextflow.config tags: - seqsero2 files: diff --git a/tests/modules/seqtk/mergepe/main.nf b/tests/modules/seqtk/mergepe/main.nf index 13654dc6..b8e12213 100644 --- a/tests/modules/seqtk/mergepe/main.nf +++ b/tests/modules/seqtk/mergepe/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [ 'suffix':'.processed' ] ) +include { SEQTK_MERGEPE } from 
'../../../../modules/seqtk/mergepe/main.nf' // // Test with single-end data diff --git a/tests/modules/seqtk/mergepe/nextflow.config b/tests/modules/seqtk/mergepe/nextflow.config new file mode 100644 index 00000000..b14e72ff --- /dev/null +++ b/tests/modules/seqtk/mergepe/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_MERGEPE { + ext.suffix = '.processed' + } + +} diff --git a/tests/modules/seqtk/mergepe/test.yml b/tests/modules/seqtk/mergepe/test.yml index 8ae95354..2a6d4d33 100644 --- a/tests/modules/seqtk/mergepe/test.yml +++ b/tests/modules/seqtk/mergepe/test.yml @@ -1,5 +1,5 @@ - name: seqtk mergepe test_seqtk_mergepe_single_end - command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_single_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/mergepe/nextflow.config tags: - seqtk/mergepe - seqtk @@ -8,7 +8,7 @@ md5sum: e325ef7deb4023447a1f074e285761af - name: seqtk mergepe test_seqtk_mergepe_paired_end - command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/mergepe/nextflow.config tags: - seqtk/mergepe - seqtk diff --git a/tests/modules/seqtk/sample/main.nf b/tests/modules/seqtk/sample/main.nf index 4508db84..6899ef62 100644 --- a/tests/modules/seqtk/sample/main.nf +++ b/tests/modules/seqtk/sample/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQTK_SAMPLE } from '../../../../modules/seqtk/sample/main.nf' addParams( options: [ 'args': '-s100', 'suffix':'.sampled' ] ) +include { SEQTK_SAMPLE } from '../../../../modules/seqtk/sample/main.nf' // // Test with single-end data diff --git a/tests/modules/seqtk/sample/nextflow.config b/tests/modules/seqtk/sample/nextflow.config new file mode 100644 index 00000000..3efac50d --- /dev/null +++ b/tests/modules/seqtk/sample/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_SAMPLE { + ext.args = '-s100' + ext.suffix = '.sampled' + } + +} diff --git a/tests/modules/seqtk/sample/test.yml b/tests/modules/seqtk/sample/test.yml index d4cf2ca9..df24b3a4 100644 --- a/tests/modules/seqtk/sample/test.yml +++ b/tests/modules/seqtk/sample/test.yml @@ -1,5 +1,5 @@ - name: seqtk sample test_seqtk_sample_single_end - command: nextflow run tests/modules/seqtk/sample -entry test_seqtk_sample_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/sample -entry test_seqtk_sample_single_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/sample/nextflow.config tags: - seqtk - seqtk/sample @@ -8,7 +8,7 @@ md5sum: 73c3e8f113860244f3ed3866a8b9d555 - name: seqtk sample test_seqtk_sample_paired_end - command: nextflow run tests/modules/seqtk/sample -entry test_seqtk_sample_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/sample -entry test_seqtk_sample_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/sample/nextflow.config tags: - seqtk - seqtk/sample diff --git a/tests/modules/seqtk/subseq/main.nf b/tests/modules/seqtk/subseq/main.nf index 
7c5dc7b2..608b7c2f 100644 --- a/tests/modules/seqtk/subseq/main.nf +++ b/tests/modules/seqtk/subseq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQTK_SUBSEQ } from '../../../../modules/seqtk/subseq/main.nf' addParams( options: ['suffix':'.filtered'] ) +include { SEQTK_SUBSEQ } from '../../../../modules/seqtk/subseq/main.nf' workflow test_seqtk_subseq { diff --git a/tests/modules/seqtk/subseq/nextflow.config b/tests/modules/seqtk/subseq/nextflow.config new file mode 100644 index 00000000..c61c4a74 --- /dev/null +++ b/tests/modules/seqtk/subseq/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_SUBSEQ { + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/seqtk/subseq/test.yml b/tests/modules/seqtk/subseq/test.yml index fca64804..4003e3ab 100644 --- a/tests/modules/seqtk/subseq/test.yml +++ b/tests/modules/seqtk/subseq/test.yml @@ -1,5 +1,5 @@ - name: seqtk subseq test_seqtk_subseq - command: nextflow run tests/modules/seqtk/subseq -entry test_seqtk_subseq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/subseq -entry test_seqtk_subseq -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/subseq/nextflow.config tags: - seqtk - seqtk/subseq diff --git a/tests/modules/sequenzautils/bam2seqz/main.nf b/tests/modules/sequenzautils/bam2seqz/main.nf index ae478b88..fcd4c7c7 100755 --- a/tests/modules/sequenzautils/bam2seqz/main.nf +++ b/tests/modules/sequenzautils/bam2seqz/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQUENZAUTILS_BAM2SEQZ } from '../../../../modules/sequenzautils/bam2seqz/main.nf' addParams( options: [:] ) +include { SEQUENZAUTILS_BAM2SEQZ } from '../../../../modules/sequenzautils/bam2seqz/main.nf' workflow test_sequenzautils_bam2seqz { diff --git a/tests/modules/sequenzautils/bam2seqz/nextflow.config b/tests/modules/sequenzautils/bam2seqz/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sequenzautils/bam2seqz/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/sequenzautils/bam2seqz/test.yml b/tests/modules/sequenzautils/bam2seqz/test.yml index 0b9cac53..f3ea6cf0 100644 --- a/tests/modules/sequenzautils/bam2seqz/test.yml +++ b/tests/modules/sequenzautils/bam2seqz/test.yml @@ -1,5 +1,5 @@ - name: sequenzautils bam2seqz - command: nextflow run ./tests/modules/sequenzautils/bam2seqz -entry test_sequenzautils_bam2seqz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sequenzautils/bam2seqz -entry test_sequenzautils_bam2seqz -c ./tests/config/nextflow.config -c ./tests/modules/sequenzautils/bam2seqz/nextflow.config tags: - sequenzautils - sequenzautils/bam2seqz diff --git a/tests/modules/sequenzautils/gcwiggle/main.nf b/tests/modules/sequenzautils/gcwiggle/main.nf index e314f1e0..b25e037e 100644 --- a/tests/modules/sequenzautils/gcwiggle/main.nf +++ b/tests/modules/sequenzautils/gcwiggle/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQUENZAUTILS_GCWIGGLE } from '../../../../modules/sequenzautils/gcwiggle/main.nf' addParams( options: [ 'args': '-w 50' ] ) +include { SEQUENZAUTILS_GCWIGGLE } from '../../../../modules/sequenzautils/gcwiggle/main.nf' workflow test_sequenzautils_gcwiggle { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/sequenzautils/gcwiggle/nextflow.config 
b/tests/modules/sequenzautils/gcwiggle/nextflow.config new file mode 100644 index 00000000..62e68935 --- /dev/null +++ b/tests/modules/sequenzautils/gcwiggle/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQUENZAUTILS_GCWIGGLE { + ext.args = '-w 50' + } + +} diff --git a/tests/modules/sequenzautils/gcwiggle/test.yml b/tests/modules/sequenzautils/gcwiggle/test.yml index aa7a3167..21ddc4ab 100644 --- a/tests/modules/sequenzautils/gcwiggle/test.yml +++ b/tests/modules/sequenzautils/gcwiggle/test.yml @@ -1,7 +1,5 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml sequenzautils/gcwiggle - name: sequenzautils gcwiggle - command: nextflow run ./tests/modules/sequenzautils/gcwiggle -entry test_sequenzautils_gcwiggle -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sequenzautils/gcwiggle -entry test_sequenzautils_gcwiggle -c ./tests/config/nextflow.config -c ./tests/modules/sequenzautils/gcwiggle/nextflow.config tags: - sequenzautils - sequenzautils/gcwiggle diff --git a/tests/modules/seqwish/induce/main.nf b/tests/modules/seqwish/induce/main.nf index 356ca705..6388fea2 100644 --- a/tests/modules/seqwish/induce/main.nf +++ b/tests/modules/seqwish/induce/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQWISH_INDUCE } from '../../../../modules/seqwish/induce/main.nf' addParams( options: [:] ) +include { SEQWISH_INDUCE } from '../../../../modules/seqwish/induce/main.nf' workflow test_seqwish_induce { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/seqwish/induce/nextflow.config b/tests/modules/seqwish/induce/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/seqwish/induce/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/seqwish/induce/test.yml b/tests/modules/seqwish/induce/test.yml index d27de3c4..d5a8a7cd 100644 --- a/tests/modules/seqwish/induce/test.yml +++ b/tests/modules/seqwish/induce/test.yml @@ -1,5 +1,5 @@ - name: seqwish induce - command: nextflow run ./tests/modules/seqwish/induce -entry test_seqwish_induce -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqwish/induce -entry test_seqwish_induce -c ./tests/config/nextflow.config -c ./tests/modules/seqwish/induce/nextflow.config tags: - seqwish - seqwish/induce diff --git a/tests/modules/shovill/main.nf b/tests/modules/shovill/main.nf index acc65169..2416022f 100644 --- a/tests/modules/shovill/main.nf +++ b/tests/modules/shovill/main.nf @@ -2,10 +2,10 @@ nextflow.enable.dsl = 2 -include { SHOVILL } from '../../../modules/shovill/main.nf' addParams( options: [args: '--gsize 2800000 --kmers 31'] ) -include { SHOVILL as SHOVILL_SKESA } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler skesa --gsize 2800000'] ) -include { SHOVILL as SHOVILL_MEGAHIT } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler megahit --gsize 2800000'] ) -include { SHOVILL as SHOVILL_VELVET } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler velvet --gsize 2800000'] ) +include { SHOVILL } from '../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_SKESA } from '../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_MEGAHIT } from 
'../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_VELVET } from '../../../modules/shovill/main.nf' workflow test_shovill { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/shovill/nextflow.config b/tests/modules/shovill/nextflow.config new file mode 100644 index 00000000..0599f80b --- /dev/null +++ b/tests/modules/shovill/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SHOVILL { + ext.args = '--gsize 2800000 --kmers 31' + } + + withName: SHOVILL_SKESA { + ext.args = '--assembler skesa --gsize 2800000' + } + + withName: SHOVILL_MEGAHIT { + ext.args = '--assembler megahit --gsize 2800000' + } + + withName: SHOVILL_VELVET { + ext.args = '--assembler velvet --gsize 2800000' + } + +} diff --git a/tests/modules/shovill/test.yml b/tests/modules/shovill/test.yml index a716bc66..6fdd2f3f 100644 --- a/tests/modules/shovill/test.yml +++ b/tests/modules/shovill/test.yml @@ -1,5 +1,5 @@ - name: shovill with spades - command: nextflow run ./tests/modules/shovill -entry test_shovill -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -13,7 +13,7 @@ - path: output/shovill/shovill.log - name: shovill with megahit - command: nextflow run ./tests/modules/shovill -entry test_shovill_megahit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_megahit -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -26,7 +26,7 @@ - path: output/shovill/shovill.log - name: shovill with skesa - command: nextflow run ./tests/modules/shovill -entry test_shovill_skesa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_skesa -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -39,7 +39,7 @@ - path: output/shovill/shovill.log - name: shovill with velvet - command: nextflow run ./tests/modules/shovill -entry test_shovill_velvet -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_velvet -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: diff --git a/tests/modules/snpdists/main.nf b/tests/modules/snpdists/main.nf index 8a29effa..be6d745c 100644 --- a/tests/modules/snpdists/main.nf +++ b/tests/modules/snpdists/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SNPDISTS } from '../../../modules/snpdists/main.nf' addParams( options: [:] ) +include { SNPDISTS } from '../../../modules/snpdists/main.nf' workflow test_snpdists { diff --git a/tests/modules/snpdists/nextflow.config b/tests/modules/snpdists/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/snpdists/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/snpdists/test.yml b/tests/modules/snpdists/test.yml index d140ce6e..c23945ce 100644 --- a/tests/modules/snpdists/test.yml +++ b/tests/modules/snpdists/test.yml @@ -1,5 +1,5 @@ - name: snpdists - command: nextflow run ./tests/modules/snpdists -entry test_snpdists -c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpdists -entry 
test_snpdists -c ./tests/config/nextflow.config -c ./tests/modules/snpdists/nextflow.config tags: - snpdists files: diff --git a/tests/modules/snpeff/main.nf b/tests/modules/snpeff/main.nf index 923f98f4..4e8a982d 100644 --- a/tests/modules/snpeff/main.nf +++ b/tests/modules/snpeff/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { SNPEFF } from '../../../modules/snpeff/main.nf' addParams( snpeff_tag: '5.0.WBcel235', use_cache: false ) +include { SNPEFF } from '../../../modules/snpeff/main.nf' workflow test_snpeff { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] - ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + SNPEFF ( input, "WBcel235.99", [] ) } diff --git a/tests/modules/snpeff/nextflow.config b/tests/modules/snpeff/nextflow.config new file mode 100644 index 00000000..589c8cfb --- /dev/null +++ b/tests/modules/snpeff/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SNPEFF { + ext.snpeff_tag = '5.0.WBcel235' + ext.use_cache = false + } + +} diff --git a/tests/modules/snpeff/test.yml b/tests/modules/snpeff/test.yml index 44eba200..8f4d980f 100644 --- a/tests/modules/snpeff/test.yml +++ b/tests/modules/snpeff/test.yml @@ -1,5 +1,5 @@ - name: snpeff test_snpeff - command: nextflow run tests/modules/snpeff -entry test_snpeff -c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpeff -entry test_snpeff -c ./tests/config/nextflow.config -c ./tests/modules/snpeff/nextflow.config tags: - snpeff files: diff --git a/tests/modules/snpsites/main.nf b/tests/modules/snpsites/main.nf index df2a6852..f7801673 100644 --- a/tests/modules/snpsites/main.nf +++ b/tests/modules/snpsites/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SNPSITES } from '../../../modules/snpsites/main.nf' addParams( options: [:] ) +include { SNPSITES } from '../../../modules/snpsites/main.nf' workflow test_snpsites { diff --git a/tests/modules/snpsites/nextflow.config b/tests/modules/snpsites/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/snpsites/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/snpsites/test.yml b/tests/modules/snpsites/test.yml index 8361cd05..d9c19cd5 100644 --- a/tests/modules/snpsites/test.yml +++ b/tests/modules/snpsites/test.yml @@ -1,5 +1,5 @@ - name: snpsites - command: nextflow run ./tests/modules/snpsites -entry test_snpsites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpsites -entry test_snpsites -c ./tests/config/nextflow.config -c ./tests/modules/snpsites/nextflow.config tags: - snpsites files: diff --git a/tests/modules/spades/main.nf b/tests/modules/spades/main.nf index b09a4266..3710eeb7 100644 --- a/tests/modules/spades/main.nf +++ b/tests/modules/spades/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SPADES } from '../../../modules/spades/main.nf' addParams( options: ['args': '--rnaviral'] ) +include { SPADES } from '../../../modules/spades/main.nf' workflow test_spades_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/spades/nextflow.config b/tests/modules/spades/nextflow.config new file mode 100644 index 00000000..5fabafae 
--- /dev/null +++ b/tests/modules/spades/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SPADES { + ext.args = '--rnaviral' + } + +} diff --git a/tests/modules/spades/test.yml b/tests/modules/spades/test.yml index a400e79d..98bc9c8c 100644 --- a/tests/modules/spades/test.yml +++ b/tests/modules/spades/test.yml @@ -1,5 +1,5 @@ - name: spades test_spades_single_end - command: nextflow run tests/modules/spades -entry test_spades_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spades -entry test_spades_single_end -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: @@ -12,7 +12,7 @@ - path: output/spades/test.spades.log - name: spades test_spades_paired_end - command: nextflow run tests/modules/spades -entry test_spades_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spades -entry test_spades_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: @@ -24,7 +24,7 @@ - path: output/spades/warnings.log - name: spades test_spades_illumina_nanopore - command: nextflow run tests/modules/spades -entry test_spades_illumina_nanopore -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spades -entry test_spades_illumina_nanopore -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: @@ -38,7 +38,7 @@ - path: output/spades/warnings.log - name: spades test_spades_illumina_pacbio - command: nextflow run tests/modules/spades -entry test_spades_illumina_pacbio -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spades -entry test_spades_illumina_pacbio -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: diff --git a/tests/modules/spatyper/main.nf b/tests/modules/spatyper/main.nf index 65729cc0..655845c7 100644 --- a/tests/modules/spatyper/main.nf +++ b/tests/modules/spatyper/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { SPATYPER } from '../../../modules/spatyper/main.nf' addParams( options: [:] ) -include { SPATYPER as SPATYPER_ENRICH } from '../../../modules/spatyper/main.nf' addParams( options: [args: '--do_enrich'] ) +include { SPATYPER } from '../../../modules/spatyper/main.nf' +include { SPATYPER as SPATYPER_ENRICH } from '../../../modules/spatyper/main.nf' workflow test_spatyper { input = [ [ id:'test' ], diff --git a/tests/modules/spatyper/nextflow.config b/tests/modules/spatyper/nextflow.config new file mode 100644 index 00000000..ac90a452 --- /dev/null +++ b/tests/modules/spatyper/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SPATYPER_ENRICH { + ext.args = '--do_enrich' + } + +} diff --git a/tests/modules/spatyper/test.yml b/tests/modules/spatyper/test.yml index 49516812..6e1f8144 100644 --- a/tests/modules/spatyper/test.yml +++ b/tests/modules/spatyper/test.yml @@ -1,5 +1,5 @@ - name: spatyper test_spatyper - command: nextflow run tests/modules/spatyper -entry test_spatyper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spatyper -entry test_spatyper -c ./tests/config/nextflow.config -c ./tests/modules/spatyper/nextflow.config tags: - spatyper files: @@ -7,7 +7,7 @@ md5sum: a698352823875171696e5e7ed7015c13 - name: spatyper test_spatyper_enrich - 
command: nextflow run tests/modules/spatyper -entry test_spatyper_enrich -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spatyper -entry test_spatyper_enrich -c ./tests/config/nextflow.config -c ./tests/modules/spatyper/nextflow.config tags: - spatyper files: diff --git a/tests/modules/sratools/fasterqdump/main.nf b/tests/modules/sratools/fasterqdump/main.nf index 1a0e0c7a..2f838fd2 100644 --- a/tests/modules/sratools/fasterqdump/main.nf +++ b/tests/modules/sratools/fasterqdump/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 include { UNTAR } from '../../../../modules/untar/main.nf' -include { SRATOOLS_FASTERQDUMP } from '../../../../modules/sratools/fasterqdump/main.nf' addParams( options: [:] ) +include { SRATOOLS_FASTERQDUMP } from '../../../../modules/sratools/fasterqdump/main.nf' workflow test_sratools_fasterqdump_single_end { diff --git a/tests/modules/sratools/fasterqdump/nextflow.config b/tests/modules/sratools/fasterqdump/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sratools/fasterqdump/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/sratools/fasterqdump/test.yml b/tests/modules/sratools/fasterqdump/test.yml index 7d022a0d..64cf2404 100644 --- a/tests/modules/sratools/fasterqdump/test.yml +++ b/tests/modules/sratools/fasterqdump/test.yml @@ -1,7 +1,8 @@ - name: sratools fasterqdump test_sratools_fasterqdump_single_end - command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c ./tests/config/nextflow.config -c ./tests/modules/sratools/fasterqdump/nextflow.config tags: - - subworkflows/sra_fastq + - sratools + - sratools/fasterqdump files: - path: output/sratools/SRR13255544.fastq.gz md5sum: 1054c7b71884acdb5eed8a378f18be82 @@ -9,9 +10,10 @@ md5sum: 466d05dafb2eec672150754168010b4d - name: sratools fasterqdump test_sratools_fasterqdump_paired_end - command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/sratools/fasterqdump/nextflow.config tags: - - subworkflows/sra_fastq + - sratools + - sratools/fasterqdump files: - path: output/sratools/SRR11140744_1.fastq.gz md5sum: 193809c784a4ea132ab2a253fa4f55b6 diff --git a/tests/modules/sratools/prefetch/main.nf b/tests/modules/sratools/prefetch/main.nf index 99439a7f..aa6252a1 100644 --- a/tests/modules/sratools/prefetch/main.nf +++ b/tests/modules/sratools/prefetch/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SRATOOLS_PREFETCH } from '../../../../modules/sratools/prefetch/main.nf' addParams( options: [:] ) +include { SRATOOLS_PREFETCH } from '../../../../modules/sratools/prefetch/main.nf' workflow test_sratools_prefetch { diff --git a/tests/modules/sratools/prefetch/nextflow.config b/tests/modules/sratools/prefetch/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sratools/prefetch/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/sratools/prefetch/test.yml b/tests/modules/sratools/prefetch/test.yml index c23db12a..a2efef77 100644 --- a/tests/modules/sratools/prefetch/test.yml +++ b/tests/modules/sratools/prefetch/test.yml @@ -1,5 +1,5 @@ - name: sratools prefetch test_sratools_prefetch - command: nextflow run tests/modules/sratools/prefetch -entry test_sratools_prefetch -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sratools/prefetch -entry test_sratools_prefetch -c ./tests/config/nextflow.config -c ./tests/modules/sratools/prefetch/nextflow.config tags: - sratools/prefetch - sratools diff --git a/tests/modules/staphopiasccmec/main.nf b/tests/modules/staphopiasccmec/main.nf index ec1b48e4..8ea310ce 100644 --- a/tests/modules/staphopiasccmec/main.nf +++ b/tests/modules/staphopiasccmec/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { STAPHOPIASCCMEC } from '../../../modules/staphopiasccmec/main.nf' addParams( options: [:] ) -include { STAPHOPIASCCMEC as STAPHOPIASCCMEC_HAMMING } from '../../../modules/staphopiasccmec/main.nf' addParams( options: [args: '--hamming'] ) +include { STAPHOPIASCCMEC } from '../../../modules/staphopiasccmec/main.nf' +include { STAPHOPIASCCMEC as STAPHOPIASCCMEC_HAMMING } from '../../../modules/staphopiasccmec/main.nf' workflow test_staphopiasccmec { diff --git a/tests/modules/staphopiasccmec/nextflow.config b/tests/modules/staphopiasccmec/nextflow.config new file mode 100644 index 00000000..7ee97c2f --- /dev/null +++ b/tests/modules/staphopiasccmec/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAPHOPIASCCMEC_HAMMING { + ext.args = '--hamming' + } + +} diff --git a/tests/modules/staphopiasccmec/test.yml b/tests/modules/staphopiasccmec/test.yml index aadfec3e..ac3f66da 100644 --- a/tests/modules/staphopiasccmec/test.yml +++ b/tests/modules/staphopiasccmec/test.yml @@ -1,5 +1,5 @@ - name: staphopiasccmec test_staphopiasccmec - command: nextflow run tests/modules/staphopiasccmec -entry test_staphopiasccmec -c tests/config/nextflow.config + command: nextflow run ./tests/modules/staphopiasccmec -entry test_staphopiasccmec -c ./tests/config/nextflow.config -c ./tests/modules/staphopiasccmec/nextflow.config tags: - staphopiasccmec files: @@ -7,7 +7,7 @@ md5sum: e6460d4164f3af5b290c5ccdb11343bf - name: staphopiasccmec test_staphopiasccmec_hamming - command: nextflow run tests/modules/staphopiasccmec -entry test_staphopiasccmec_hamming -c tests/config/nextflow.config + command: nextflow run ./tests/modules/staphopiasccmec -entry test_staphopiasccmec_hamming -c ./tests/config/nextflow.config -c ./tests/modules/staphopiasccmec/nextflow.config tags: - staphopiasccmec files: diff --git a/tests/modules/star/align/main.nf b/tests/modules/star/align/main.nf index d7a7ef96..bf305d54 100644 --- a/tests/modules/star/align/main.nf +++ b/tests/modules/star/align/main.nf @@ -2,51 +2,77 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9']) -include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'], seq_platform: 'illumina') -include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 
--peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'], seq_platform: 'illumina') -include { STAR_ALIGN as STAR_FOR_STARFUSION } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outReadsUnmapped None --twopassMode Basic --outSAMstrandField intronMotif --outSAMunmapped Within --chimSegmentMin 12 --chimJunctionOverhangMin 8 --chimOutJunctionFormat 1 --alignSJDBoverhangMin 10 --alignMatesGapMax 100000 --alignIntronMax 100000 --alignSJstitchMismatchNmax 5 -1 5 5 --chimMultimapScoreRange 3 --chimScoreJunctionNonGTAG -4 --chimMultimapNmax 20 --chimNonchimScoreDropMin 10 --peOverlapNbasesMin 12 --peOverlapMMp 0.1 --alignInsertionFlush Right --alignSplicedMateMapLminOverLmate 0 --alignSplicedMateMapLmin 30'] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' +include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' +include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' +include { STAR_ALIGN as STAR_FOR_STARFUSION } from '../../../../modules/star/align/main.nf' workflow test_star_alignment_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true) ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } workflow test_star_alignment_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } workflow test_star_alignment_paired_end_for_fusion { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - 
file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } workflow test_star_alignment_paired_end_for_starfusion { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = false + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_FOR_STARFUSION ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_FOR_STARFUSION ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } diff --git a/tests/modules/star/align/nextflow.config b/tests/modules/star/align/nextflow.config new file mode 100644 index 00000000..751f7837 --- /dev/null +++ b/tests/modules/star/align/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAR_GENOMEGENERATE { + ext.args = '--genomeSAindexNbases 9' + } + + withName: STAR_ALIGN { + ext.args = '--readFilesCommand zcat' + } + + withName: STAR_FOR_ARRIBA { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50' + } + + withName: STAR_FOR_STARFUSION { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outReadsUnmapped None --twopassMode Basic --outSAMstrandField intronMotif --outSAMunmapped Within --chimSegmentMin 12 --chimJunctionOverhangMin 8 --chimOutJunctionFormat 1 --alignSJDBoverhangMin 10 --alignMatesGapMax 100000 --alignIntronMax 100000 --alignSJstitchMismatchNmax 5 -1 5 5 --chimMultimapScoreRange 3 --chimScoreJunctionNonGTAG -4 --chimMultimapNmax 20 --chimNonchimScoreDropMin 10 --peOverlapNbasesMin 12 --peOverlapMMp 0.1 --alignInsertionFlush Right --alignSplicedMateMapLminOverLmate 0 --alignSplicedMateMapLmin 30' + } + +} diff --git 
a/tests/modules/star/align/test.yml b/tests/modules/star/align/test.yml index 47731c5c..af5bebe5 100644 --- a/tests/modules/star/align/test.yml +++ b/tests/modules/star/align/test.yml @@ -1,39 +1,39 @@ - name: star align test_star_alignment_single_end - command: nextflow run tests/modules/star/align -entry test_star_alignment_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_single_end -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: b9f5e2f6a624b64c300fe25dc3ac801f @@ -43,41 +43,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: 
output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: 38d08f0b944a2a1b981a250d675aa0d9 @@ -87,41 +87,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end_for_fusion - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_fusion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end_for_fusion -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: 
output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: c740d5177067c1fcc48ab7a16cd639d7 @@ -131,41 +131,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end_for_starfusion - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_starfusion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end_for_starfusion -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: a1bd1b40950a58ea2776908076160052 diff --git a/tests/modules/star/genomegenerate/main.nf b/tests/modules/star/genomegenerate/main.nf index 7f9e3072..31601478 100644 --- a/tests/modules/star/genomegenerate/main.nf +++ b/tests/modules/star/genomegenerate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [publish_dir:'star'] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' workflow test_star_genomegenerate { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) diff --git 
a/tests/modules/star/genomegenerate/nextflow.config b/tests/modules/star/genomegenerate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/star/genomegenerate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/star/genomegenerate/test.yml b/tests/modules/star/genomegenerate/test.yml index df8d5efc..0e397009 100644 --- a/tests/modules/star/genomegenerate/test.yml +++ b/tests/modules/star/genomegenerate/test.yml @@ -1,5 +1,5 @@ - name: star genomegenerate test_star_genomegenerate - command: nextflow run tests/modules/star/genomegenerate -entry test_star_genomegenerate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/genomegenerate -entry test_star_genomegenerate -c ./tests/config/nextflow.config -c ./tests/modules/star/genomegenerate/nextflow.config tags: - star - star/genomegenerate diff --git a/tests/modules/strelka/germline/main.nf b/tests/modules/strelka/germline/main.nf index 0d5193bb..c50d76e1 100644 --- a/tests/modules/strelka/germline/main.nf +++ b/tests/modules/strelka/germline/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRELKA_GERMLINE } from '../../../../modules/strelka/germline/main.nf' addParams( options: [:] ) +include { STRELKA_GERMLINE } from '../../../../modules/strelka/germline/main.nf' workflow test_strelka_germline { input = [ diff --git a/tests/modules/strelka/germline/nextflow.config b/tests/modules/strelka/germline/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/strelka/germline/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/strelka/germline/test.yml b/tests/modules/strelka/germline/test.yml index a3ab3ef6..8db81aa0 100644 --- a/tests/modules/strelka/germline/test.yml +++ b/tests/modules/strelka/germline/test.yml @@ -1,5 +1,5 @@ - name: strelka germline test_strelka_germline - command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline -c ./tests/config/nextflow.config -c ./tests/modules/strelka/germline/nextflow.config tags: - strelka - strelka/germline @@ -10,7 +10,7 @@ - path: output/strelka/test.variants.vcf.gz.tbi - name: strelka germline test_strelka_germline_target_bed - command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/strelka/germline/nextflow.config tags: - strelka - strelka/germline diff --git a/tests/modules/strelka/somatic/main.nf b/tests/modules/strelka/somatic/main.nf index 60127f58..b1d4efeb 100644 --- a/tests/modules/strelka/somatic/main.nf +++ b/tests/modules/strelka/somatic/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRELKA_SOMATIC } from '../../../../modules/strelka/somatic/main.nf' addParams( options: [:] ) +include { STRELKA_SOMATIC } from '../../../../modules/strelka/somatic/main.nf' workflow test_strelka_somatic { diff --git a/tests/modules/strelka/somatic/nextflow.config b/tests/modules/strelka/somatic/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null 
+++ b/tests/modules/strelka/somatic/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/strelka/somatic/test.yml b/tests/modules/strelka/somatic/test.yml index b461d335..a56f955a 100644 --- a/tests/modules/strelka/somatic/test.yml +++ b/tests/modules/strelka/somatic/test.yml @@ -1,5 +1,5 @@ - name: strelka somatic test_strelka_somatic - command: nextflow run tests/modules/strelka/somatic -entry test_strelka_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/somatic -entry test_strelka_somatic -c ./tests/config/nextflow.config -c ./tests/modules/strelka/somatic/nextflow.config tags: - strelka - strelka/somatic @@ -12,7 +12,7 @@ md5sum: 4cb176febbc8c26d717a6c6e67b9c905 - name: strelka somatic test_strelka__best_practices_somatic - command: nextflow run tests/modules/strelka/somatic -entry test_strelka__best_practices_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/somatic -entry test_strelka__best_practices_somatic -c ./tests/config/nextflow.config -c ./tests/modules/strelka/somatic/nextflow.config tags: - strelka - strelka/somatic diff --git a/tests/modules/stringtie/merge/main.nf b/tests/modules/stringtie/merge/main.nf index 49ff5a41..7851e755 100644 --- a/tests/modules/stringtie/merge/main.nf +++ b/tests/modules/stringtie/merge/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [:] ) -include { STRINGTIE_MERGE } from '../../../../modules/stringtie/merge/main.nf' addParams( options: [:] ) +include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' +include { STRINGTIE_MERGE } from '../../../../modules/stringtie/merge/main.nf' /* * Test with forward strandedness diff --git a/tests/modules/stringtie/merge/nextflow.config b/tests/modules/stringtie/merge/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/stringtie/merge/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/stringtie/merge/test.yml b/tests/modules/stringtie/merge/test.yml index e6436612..392a1d7c 100644 --- a/tests/modules/stringtie/merge/test.yml +++ b/tests/modules/stringtie/merge/test.yml @@ -1,5 +1,5 @@ - name: stringtie merge forward-strand - command: nextflow run tests/modules/stringtie/merge -entry test_stringtie_forward_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/merge -entry test_stringtie_forward_merge -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/merge/nextflow.config tags: - stringtie - stringtie/merge @@ -24,7 +24,7 @@ md5sum: 0e42709bfe30c2c7f2574ba664f5fa9f - name: stringtie merge test_stringtie_reverse_merge - command: nextflow run tests/modules/stringtie/merge -entry test_stringtie_reverse_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/merge -entry test_stringtie_reverse_merge -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/merge/nextflow.config tags: - stringtie - stringtie/merge diff --git a/tests/modules/stringtie/stringtie/main.nf b/tests/modules/stringtie/stringtie/main.nf index b902cc41..ae6abe67 100644 --- a/tests/modules/stringtie/stringtie/main.nf +++ b/tests/modules/stringtie/stringtie/main.nf @@ -2,7 +2,7 
@@ nextflow.enable.dsl = 2 -include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [:] ) +include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' // // Test with forward strandedness // diff --git a/tests/modules/stringtie/stringtie/nextflow.config b/tests/modules/stringtie/stringtie/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/stringtie/stringtie/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/stringtie/stringtie/test.yml b/tests/modules/stringtie/stringtie/test.yml index 28c1b3c2..732b9fd1 100644 --- a/tests/modules/stringtie/stringtie/test.yml +++ b/tests/modules/stringtie/stringtie/test.yml @@ -1,5 +1,5 @@ - name: stringtie stringtie forward - command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_forward -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_forward -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/stringtie/nextflow.config tags: - stringtie - stringtie/stringtie @@ -21,7 +21,7 @@ md5sum: e981c0038295ae54b63cedb1083f1540 - name: stringtie stringtie reverse - command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_reverse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_reverse -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/stringtie/nextflow.config tags: - stringtie - stringtie/stringtie diff --git a/tests/modules/subread/featurecounts/main.nf b/tests/modules/subread/featurecounts/main.nf index eae60f80..a8fa5c75 100644 --- a/tests/modules/subread/featurecounts/main.nf +++ b/tests/modules/subread/featurecounts/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SUBREAD_FEATURECOUNTS } from '../../../../modules/subread/featurecounts/main.nf' addParams( options: [args:'-t CDS'] ) +include { SUBREAD_FEATURECOUNTS } from '../../../../modules/subread/featurecounts/main.nf' workflow test_subread_featurecounts_forward { diff --git a/tests/modules/subread/featurecounts/nextflow.config b/tests/modules/subread/featurecounts/nextflow.config new file mode 100644 index 00000000..d9fd4fd5 --- /dev/null +++ b/tests/modules/subread/featurecounts/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SUBREAD_FEATURECOUNTS { + ext.args = '-t CDS' + } + +} diff --git a/tests/modules/subread/featurecounts/test.yml b/tests/modules/subread/featurecounts/test.yml index be6bed47..7cc24457 100644 --- a/tests/modules/subread/featurecounts/test.yml +++ b/tests/modules/subread/featurecounts/test.yml @@ -1,5 +1,5 @@ - name: subread featurecounts test_subread_featurecounts_forward - command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_forward -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_forward -c ./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts @@ -10,7 +10,7 @@ md5sum: 8f602ff9a8ef467af43294e80b367cdf - name: subread featurecounts test_subread_featurecounts_reverse - command: nextflow run tests/modules/subread/featurecounts -entry 
test_subread_featurecounts_reverse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_reverse -c ./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts @@ -21,7 +21,7 @@ md5sum: 7cfa30ad678b9bc1bc63afbb0281547b - name: subread featurecounts test_subread_featurecounts_unstranded - command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_unstranded -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_unstranded -c ./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts diff --git a/tests/modules/tabix/bgzip/main.nf b/tests/modules/tabix/bgzip/main.nf index 8756b17d..4d349890 100644 --- a/tests/modules/tabix/bgzip/main.nf +++ b/tests/modules/tabix/bgzip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TABIX_BGZIP } from '../../../../modules/tabix/bgzip/main.nf' addParams( options: [:] ) +include { TABIX_BGZIP } from '../../../../modules/tabix/bgzip/main.nf' workflow test_tabix_bgzip { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/tabix/bgzip/nextflow.config b/tests/modules/tabix/bgzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tabix/bgzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/tabix/bgzip/test.yml b/tests/modules/tabix/bgzip/test.yml index 58412979..19357655 100644 --- a/tests/modules/tabix/bgzip/test.yml +++ b/tests/modules/tabix/bgzip/test.yml @@ -1,5 +1,5 @@ - name: tabix bgzip - command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgzip/nextflow.config tags: - tabix - tabix/bgzip diff --git a/tests/modules/tabix/bgziptabix/main.nf b/tests/modules/tabix/bgziptabix/main.nf index 51e242fd..b2ff70d0 100644 --- a/tests/modules/tabix/bgziptabix/main.nf +++ b/tests/modules/tabix/bgziptabix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TABIX_BGZIPTABIX } from '../../../../modules/tabix/bgziptabix/main.nf' addParams( options: ['args2': '-p vcf'] ) +include { TABIX_BGZIPTABIX } from '../../../../modules/tabix/bgziptabix/main.nf' workflow test_tabix_bgziptabix { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/tabix/bgziptabix/nextflow.config b/tests/modules/tabix/bgziptabix/nextflow.config new file mode 100644 index 00000000..041bfa6a --- /dev/null +++ b/tests/modules/tabix/bgziptabix/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TABIX_BGZIPTABIX { + ext.args2 = '-p vcf' + } + +} diff --git a/tests/modules/tabix/bgziptabix/test.yml b/tests/modules/tabix/bgziptabix/test.yml index 31048109..1bcfa88a 100644 --- a/tests/modules/tabix/bgziptabix/test.yml +++ b/tests/modules/tabix/bgziptabix/test.yml @@ -1,5 +1,5 @@ - name: tabix bgziptabix - command: nextflow run ./tests/modules/tabix/bgziptabix -entry test_tabix_bgziptabix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/bgziptabix -entry 
test_tabix_bgziptabix -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgziptabix/nextflow.config tags: - tabix - tabix/bgziptabix diff --git a/tests/modules/tabix/tabix/main.nf b/tests/modules/tabix/tabix/main.nf index 0963ffcd..993ee812 100644 --- a/tests/modules/tabix/tabix/main.nf +++ b/tests/modules/tabix/tabix/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p bed'] ) -include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p gff'] ) -include { TABIX_TABIX as TABIX_VCF } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p vcf'] ) +include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_VCF } from '../../../../modules/tabix/tabix/main.nf' workflow test_tabix_tabix_bed { input = [ [ id:'B.bed' ], // meta map diff --git a/tests/modules/tabix/tabix/nextflow.config b/tests/modules/tabix/tabix/nextflow.config new file mode 100644 index 00000000..aa97a873 --- /dev/null +++ b/tests/modules/tabix/tabix/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TABIX_BED { + ext.args = '-p bed' + } + + withName: TABIX_GFF { + ext.args = '-p gff' + } + + withName: TABIX_VCF { + ext.args = '-p vcf' + } + +} diff --git a/tests/modules/tabix/tabix/test.yml b/tests/modules/tabix/tabix/test.yml index 646215c8..46be28dd 100644 --- a/tests/modules/tabix/tabix/test.yml +++ b/tests/modules/tabix/tabix/test.yml @@ -1,5 +1,5 @@ - name: tabix tabix bed - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_bed -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix @@ -7,7 +7,7 @@ - path: ./output/tabix/test.bed.gz.tbi md5sum: 5b40851ab6b8ccf7946313c86481c0df - name: tabix tabix gff - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_gff -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_gff -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix @@ -15,7 +15,7 @@ - path: ./output/tabix/genome.gff3.gz.tbi md5sum: f79a67d95a98076e04fbe0455d825926 - name: tabix tabix vcf - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix diff --git a/tests/modules/tbprofiler/profile/main.nf b/tests/modules/tbprofiler/profile/main.nf index e0c6ef56..0141a77f 100644 --- a/tests/modules/tbprofiler/profile/main.nf +++ b/tests/modules/tbprofiler/profile/main.nf @@ -2,23 +2,27 @@ nextflow.enable.dsl = 2 -include { TBPROFILER_PROFILE as TBPROFILER_PROFILE_ILLUMINA } from '../../../../modules/tbprofiler/profile/main.nf' addParams( options: [args: '--platform illumina'] ) -include { TBPROFILER_PROFILE as TBPROFILER_PROFILE_NANOPORE} from '../../../../modules/tbprofiler/profile/main.nf' addParams( options: [args: 
'--platform nanopore'] ) +include { TBPROFILER_PROFILE } from '../../../../modules/tbprofiler/profile/main.nf' workflow test_tbprofiler_profile_illumina { - - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - TBPROFILER_PROFILE_ILLUMINA ( input ) + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + TBPROFILER_PROFILE ( input ) } - workflow test_tbprofiler_profile_nanopore { - - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] - TBPROFILER_PROFILE_NANOPORE ( input ) + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) + ] + + TBPROFILER_PROFILE ( input ) } diff --git a/tests/modules/tbprofiler/profile/nextflow.config b/tests/modules/tbprofiler/profile/nextflow.config new file mode 100644 index 00000000..50cb99c6 --- /dev/null +++ b/tests/modules/tbprofiler/profile/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TBPROFILER_PROFILE_ILLUMINA { + ext.args = '--platform illumina' + } + + withName: TBPROFILER_PROFILE_NANOPORE { + ext.args = '--platform nanopore' + } + +} diff --git a/tests/modules/tbprofiler/profile/test.yml b/tests/modules/tbprofiler/profile/test.yml index abfb552d..8b40f1fa 100644 --- a/tests/modules/tbprofiler/profile/test.yml +++ b/tests/modules/tbprofiler/profile/test.yml @@ -1,5 +1,5 @@ - name: tbprofiler profile illumina - command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_illumina -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_illumina -c ./tests/config/nextflow.config -c ./tests/modules/tbprofiler/profile/nextflow.config tags: - tbprofiler - tbprofiler/profile @@ -10,7 +10,7 @@ - path: output/tbprofiler/vcf/test.targets.csq.vcf.gz - name: tbprofiler profile nanopore - command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_nanopore -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_nanopore -c ./tests/config/nextflow.config -c ./tests/modules/tbprofiler/profile/nextflow.config tags: - tbprofiler - tbprofiler/profile diff --git a/tests/modules/tiddit/cov/main.nf b/tests/modules/tiddit/cov/main.nf index aed3516c..1bb35145 100644 --- a/tests/modules/tiddit/cov/main.nf +++ b/tests/modules/tiddit/cov/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TIDDIT_COV } from '../../../../modules/tiddit/cov/main.nf' addParams( options: [:] ) +include { TIDDIT_COV } from '../../../../modules/tiddit/cov/main.nf' workflow test_tiddit_cov { diff --git a/tests/modules/tiddit/cov/nextflow.config b/tests/modules/tiddit/cov/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tiddit/cov/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/tiddit/cov/test.yml b/tests/modules/tiddit/cov/test.yml index c2aa6439..90c4cbb3 100644 --- a/tests/modules/tiddit/cov/test.yml +++ b/tests/modules/tiddit/cov/test.yml @@ -1,5 +1,5 @@ - name: tiddit cov test_tiddit_cov - command: nextflow run tests/modules/tiddit/cov -entry test_tiddit_cov -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config tags: - tiddit - tiddit/cov @@ -8,7 +8,7 @@ md5sum: f7974948f809f94879d8a60b726194f5 - name: tiddit cov test_tiddit_cov_no_ref - command: nextflow run tests/modules/tiddit/cov -entry test_tiddit_cov_no_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config tags: - tiddit - tiddit/cov diff --git a/tests/modules/tiddit/sv/main.nf b/tests/modules/tiddit/sv/main.nf index 8a5a8140..8dae4950 100644 --- a/tests/modules/tiddit/sv/main.nf +++ b/tests/modules/tiddit/sv/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TIDDIT_SV } from '../../../../modules/tiddit/sv/main.nf' addParams( options: [:] ) +include { TIDDIT_SV } from '../../../../modules/tiddit/sv/main.nf' workflow test_tiddit_sv { input = [ diff --git a/tests/modules/tiddit/sv/nextflow.config b/tests/modules/tiddit/sv/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tiddit/sv/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/tiddit/sv/test.yml b/tests/modules/tiddit/sv/test.yml index ed19bf14..168d21c5 100644 --- a/tests/modules/tiddit/sv/test.yml +++ b/tests/modules/tiddit/sv/test.yml @@ -1,5 +1,5 @@ - name: tiddit sv - command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config tags: - tiddit - tiddit/sv @@ -11,7 +11,7 @@ - path: output/tiddit/test.vcf - name: tiddit sv no ref - command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config tags: - tiddit - tiddit/sv diff --git a/tests/modules/trimgalore/main.nf b/tests/modules/trimgalore/main.nf index 3001469d..adeda539 100644 --- a/tests/modules/trimgalore/main.nf +++ b/tests/modules/trimgalore/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TRIMGALORE } from '../../../modules/trimgalore/main.nf' addParams( options: [:] ) +include { TRIMGALORE } from '../../../modules/trimgalore/main.nf' // // Test with single-end data diff --git a/tests/modules/trimgalore/nextflow.config b/tests/modules/trimgalore/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/trimgalore/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/trimgalore/test.yml b/tests/modules/trimgalore/test.yml index c176f592..ecbd2b5a 100644 --- a/tests/modules/trimgalore/test.yml +++ b/tests/modules/trimgalore/test.yml @@ -1,5 +1,5 @@ - name: trimgalore 
single-end - command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_single_end -c ./tests/config/nextflow.config -c ./tests/modules/trimgalore/nextflow.config tags: - trimgalore files: @@ -9,7 +9,7 @@ - path: ./output/trimgalore/test_trimmed.fq.gz - name: trimgalore paired-end - command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/trimgalore/nextflow.config tags: - trimgalore files: diff --git a/tests/modules/ucsc/bed12tobigbed/main.nf b/tests/modules/ucsc/bed12tobigbed/main.nf index 8ed64166..7590fc0e 100644 --- a/tests/modules/ucsc/bed12tobigbed/main.nf +++ b/tests/modules/ucsc/bed12tobigbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BED12TOBIGBED } from '../../../../modules/ucsc/bed12tobigbed/main.nf' addParams( options: [:] ) +include { UCSC_BED12TOBIGBED } from '../../../../modules/ucsc/bed12tobigbed/main.nf' workflow test_ucsc_bed12tobigbed { input = [ [ id: 'test' ], // meta map diff --git a/tests/modules/ucsc/bed12tobigbed/nextflow.config b/tests/modules/ucsc/bed12tobigbed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bed12tobigbed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bed12tobigbed/test.yml b/tests/modules/ucsc/bed12tobigbed/test.yml index e0ee6f75..6bd4262d 100644 --- a/tests/modules/ucsc/bed12tobigbed/test.yml +++ b/tests/modules/ucsc/bed12tobigbed/test.yml @@ -1,5 +1,5 @@ - name: ucsc bed12tobigbed - command: nextflow run ./tests/modules/ucsc/bed12tobigbed -entry test_ucsc_bed12tobigbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bed12tobigbed -entry test_ucsc_bed12tobigbed -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bed12tobigbed/nextflow.config tags: - ucsc/bed12tobigbed files: diff --git a/tests/modules/ucsc/bedclip/main.nf b/tests/modules/ucsc/bedclip/main.nf index 162c2eb4..8ccfd3b0 100755 --- a/tests/modules/ucsc/bedclip/main.nf +++ b/tests/modules/ucsc/bedclip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BEDCLIP } from '../../../../modules/ucsc/bedclip/main.nf' addParams( options: [suffix:'.clip'] ) +include { UCSC_BEDCLIP } from '../../../../modules/ucsc/bedclip/main.nf' workflow test_ucsc_bedclip { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/ucsc/bedclip/nextflow.config b/tests/modules/ucsc/bedclip/nextflow.config new file mode 100644 index 00000000..4adc3b8f --- /dev/null +++ b/tests/modules/ucsc/bedclip/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: UCSC_BEDCLIP { + ext.suffix = '.clip' + } + +} diff --git a/tests/modules/ucsc/bedclip/test.yml b/tests/modules/ucsc/bedclip/test.yml index 103795da..bcf22c71 100755 --- a/tests/modules/ucsc/bedclip/test.yml +++ b/tests/modules/ucsc/bedclip/test.yml @@ -1,5 +1,5 @@ - name: ucsc bedclip - command: nextflow run ./tests/modules/ucsc/bedclip -entry test_ucsc_bedclip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bedclip -entry 
test_ucsc_bedclip -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bedclip/nextflow.config tags: - ucsc - ucsc/bedclip diff --git a/tests/modules/ucsc/bedgraphtobigwig/main.nf b/tests/modules/ucsc/bedgraphtobigwig/main.nf index 8d83e235..c6db7225 100644 --- a/tests/modules/ucsc/bedgraphtobigwig/main.nf +++ b/tests/modules/ucsc/bedgraphtobigwig/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BEDGRAPHTOBIGWIG } from '../../../../modules/ucsc/bedgraphtobigwig/main.nf' addParams( options: [:] ) +include { UCSC_BEDGRAPHTOBIGWIG } from '../../../../modules/ucsc/bedgraphtobigwig/main.nf' workflow test_ucsc_bedgraphtobigwig { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/ucsc/bedgraphtobigwig/nextflow.config b/tests/modules/ucsc/bedgraphtobigwig/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bedgraphtobigwig/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bedgraphtobigwig/test.yml b/tests/modules/ucsc/bedgraphtobigwig/test.yml index 726a07ca..c00a0231 100644 --- a/tests/modules/ucsc/bedgraphtobigwig/test.yml +++ b/tests/modules/ucsc/bedgraphtobigwig/test.yml @@ -1,5 +1,5 @@ - name: ucsc bedgraphtobigwig - command: nextflow run ./tests/modules/ucsc/bedgraphtobigwig -entry test_ucsc_bedgraphtobigwig -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bedgraphtobigwig -entry test_ucsc_bedgraphtobigwig -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bedgraphtobigwig/nextflow.config tags: - ucsc/bedgraphtobigwig files: diff --git a/tests/modules/ucsc/bigwigaverageoverbed/main.nf b/tests/modules/ucsc/bigwigaverageoverbed/main.nf index 9bd5a5e2..3b20dc32 100644 --- a/tests/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/tests/modules/ucsc/bigwigaverageoverbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BIGWIGAVERAGEOVERBED } from '../../../../modules/ucsc/bigwigaverageoverbed/main.nf' addParams( options: [:] ) +include { UCSC_BIGWIGAVERAGEOVERBED } from '../../../../modules/ucsc/bigwigaverageoverbed/main.nf' workflow test_ucsc_bigwigaverageoverbed { input = [ diff --git a/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config b/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bigwigaverageoverbed/test.yml b/tests/modules/ucsc/bigwigaverageoverbed/test.yml index 641e9be5..7344c944 100644 --- a/tests/modules/ucsc/bigwigaverageoverbed/test.yml +++ b/tests/modules/ucsc/bigwigaverageoverbed/test.yml @@ -1,5 +1,5 @@ - name: ucsc bigwigaverageoverbed test_ucsc_bigwigaverageoverbed - command: nextflow run tests/modules/ucsc/bigwigaverageoverbed -entry test_ucsc_bigwigaverageoverbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bigwigaverageoverbed -entry test_ucsc_bigwigaverageoverbed -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bigwigaverageoverbed/nextflow.config tags: - ucsc - ucsc/bigwigaverageoverbed diff --git a/tests/modules/ucsc/liftover/main.nf b/tests/modules/ucsc/liftover/main.nf index 9670759a..168193f4 100644 --- a/tests/modules/ucsc/liftover/main.nf +++ 
b/tests/modules/ucsc/liftover/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_LIFTOVER } from '../../../../modules/ucsc/liftover/main.nf' addParams( options: [:] ) +include { UCSC_LIFTOVER } from '../../../../modules/ucsc/liftover/main.nf' workflow test_ucsc_liftover { diff --git a/tests/modules/ucsc/liftover/nextflow.config b/tests/modules/ucsc/liftover/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/liftover/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/liftover/test.yml b/tests/modules/ucsc/liftover/test.yml index 74df6512..c3016189 100644 --- a/tests/modules/ucsc/liftover/test.yml +++ b/tests/modules/ucsc/liftover/test.yml @@ -1,5 +1,5 @@ - name: ucsc liftover test_ucsc_liftover - command: nextflow run tests/modules/ucsc/liftover -entry test_ucsc_liftover -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/liftover -entry test_ucsc_liftover -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/liftover/nextflow.config tags: - ucsc - ucsc/liftover diff --git a/tests/modules/ucsc/wigtobigwig/main.nf b/tests/modules/ucsc/wigtobigwig/main.nf index 81296ac4..614d4150 100644 --- a/tests/modules/ucsc/wigtobigwig/main.nf +++ b/tests/modules/ucsc/wigtobigwig/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_WIGTOBIGWIG } from '../../../../modules/ucsc/wigtobigwig/main.nf' addParams( options: [:] ) +include { UCSC_WIGTOBIGWIG } from '../../../../modules/ucsc/wigtobigwig/main.nf' workflow test_ucsc_wigtobigwig { diff --git a/tests/modules/ucsc/wigtobigwig/nextflow.config b/tests/modules/ucsc/wigtobigwig/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/wigtobigwig/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/wigtobigwig/test.yml b/tests/modules/ucsc/wigtobigwig/test.yml index 15388adb..08d4bce4 100644 --- a/tests/modules/ucsc/wigtobigwig/test.yml +++ b/tests/modules/ucsc/wigtobigwig/test.yml @@ -1,5 +1,5 @@ - name: ucsc wigtobigwig test_ucsc_wigtobigwig - command: nextflow run tests/modules/ucsc/wigtobigwig -entry test_ucsc_wigtobigwig -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/wigtobigwig -entry test_ucsc_wigtobigwig -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/wigtobigwig/nextflow.config tags: - ucsc - ucsc/wigtobigwig diff --git a/tests/modules/ultra/pipeline/main.nf b/tests/modules/ultra/pipeline/main.nf index 1404712b..483d48fc 100644 --- a/tests/modules/ultra/pipeline/main.nf +++ b/tests/modules/ultra/pipeline/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { ULTRA_PIPELINE } from '../../../../modules/ultra/pipeline/main.nf' addParams( options: [:] ) -include { GUNZIP } from '../../../../modules/gunzip/main.nf' addParams( options: [:] ) -include { GFFREAD } from '../../../../modules/gffread/main.nf' addParams( options: [args: "--sort-alpha --keep-genes -T", suffix: "_sorted"] ) +include { ULTRA_PIPELINE } from '../../../../modules/ultra/pipeline/main.nf' +include { GUNZIP } from '../../../../modules/gunzip/main.nf' +include { GFFREAD } from '../../../../modules/gffread/main.nf' workflow test_ultra_pipeline { diff --git a/tests/modules/ultra/pipeline/nextflow.config 
b/tests/modules/ultra/pipeline/nextflow.config new file mode 100644 index 00000000..a3b88ea3 --- /dev/null +++ b/tests/modules/ultra/pipeline/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GFFREAD { + ext.args = '--sort-alpha --keep-genes -T' + ext.suffix = '_sorted' + } + +} diff --git a/tests/modules/ultra/pipeline/test.yml b/tests/modules/ultra/pipeline/test.yml index 7140193b..d424ba73 100644 --- a/tests/modules/ultra/pipeline/test.yml +++ b/tests/modules/ultra/pipeline/test.yml @@ -1,5 +1,5 @@ - name: ultra pipeline test_ultra_pipeline - command: nextflow run tests/modules/ultra/pipeline -entry test_ultra_pipeline -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ultra/pipeline -entry test_ultra_pipeline -c ./tests/config/nextflow.config -c ./tests/modules/ultra/pipeline/nextflow.config tags: - ultra - ultra/pipeline diff --git a/tests/modules/unicycler/main.nf b/tests/modules/unicycler/main.nf index 5352fc8b..861b139b 100644 --- a/tests/modules/unicycler/main.nf +++ b/tests/modules/unicycler/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UNICYCLER } from '../../../modules/unicycler/main.nf' addParams( options: [:] ) +include { UNICYCLER } from '../../../modules/unicycler/main.nf' workflow test_unicycler_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/unicycler/nextflow.config b/tests/modules/unicycler/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/unicycler/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/unicycler/test.yml b/tests/modules/unicycler/test.yml index 124ac3e2..e25845aa 100644 --- a/tests/modules/unicycler/test.yml +++ b/tests/modules/unicycler/test.yml @@ -1,5 +1,5 @@ - name: unicycler test_unicycler_single_end - command: nextflow run tests/modules/unicycler -entry test_unicycler_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_single_end -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config tags: - unicycler files: @@ -10,7 +10,7 @@ - "Assembly complete" - name: unicycler test_unicycler_paired_end - command: nextflow run tests/modules/unicycler -entry test_unicycler_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config tags: - unicycler files: @@ -21,7 +21,7 @@ - "Assembly complete" - name: unicycler test_unicycler_shortreads_longreads - command: nextflow run tests/modules/unicycler -entry test_unicycler_shortreads_longreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_shortreads_longreads -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config tags: - unicycler files: diff --git a/tests/modules/untar/main.nf b/tests/modules/untar/main.nf index b7317bd9..056e3ea7 100644 --- a/tests/modules/untar/main.nf +++ b/tests/modules/untar/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../modules/untar/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../modules/untar/main.nf' workflow test_untar { input = 
file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) diff --git a/tests/modules/untar/nextflow.config b/tests/modules/untar/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/untar/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/untar/test.yml b/tests/modules/untar/test.yml index 9f48e86c..6d0d1d12 100644 --- a/tests/modules/untar/test.yml +++ b/tests/modules/untar/test.yml @@ -1,5 +1,5 @@ - name: untar - command: nextflow run ./tests/modules/untar -entry test_untar -c tests/config/nextflow.config + command: nextflow run ./tests/modules/untar -entry test_untar -c ./tests/config/nextflow.config -c ./tests/modules/untar/nextflow.config tags: - untar files: diff --git a/tests/modules/unzip/main.nf b/tests/modules/unzip/main.nf index b5b208be..520fe31e 100644 --- a/tests/modules/unzip/main.nf +++ b/tests/modules/unzip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UNZIP } from '../../../modules/unzip/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../modules/unzip/main.nf' workflow test_unzip { diff --git a/tests/modules/unzip/nextflow.config b/tests/modules/unzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/unzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/unzip/test.yml b/tests/modules/unzip/test.yml index 1b0b1a97..8016b4fa 100644 --- a/tests/modules/unzip/test.yml +++ b/tests/modules/unzip/test.yml @@ -1,5 +1,5 @@ - name: unzip - command: nextflow run ./tests/modules/unzip -entry test_unzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unzip -entry test_unzip -c ./tests/config/nextflow.config -c ./tests/modules/unzip/nextflow.config tags: - unzip files: diff --git a/tests/modules/variantbam/main.nf b/tests/modules/variantbam/main.nf index 3ea09197..016a9104 100644 --- a/tests/modules/variantbam/main.nf +++ b/tests/modules/variantbam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { VARIANTBAM } from '../../../modules/variantbam/main.nf' addParams( options: [args: '-m 1'] ) +include { VARIANTBAM } from '../../../modules/variantbam/main.nf' workflow test_variantbam { diff --git a/tests/modules/variantbam/nextflow.config b/tests/modules/variantbam/nextflow.config new file mode 100644 index 00000000..d0314010 --- /dev/null +++ b/tests/modules/variantbam/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: VARIANTBAM { + ext.args = '-m 1' + } + +} diff --git a/tests/modules/variantbam/test.yml b/tests/modules/variantbam/test.yml index 51b824cd..1c9550ed 100644 --- a/tests/modules/variantbam/test.yml +++ b/tests/modules/variantbam/test.yml @@ -1,5 +1,5 @@ - name: variantbam test_variantbam - command: nextflow run tests/modules/variantbam -entry test_variantbam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/variantbam -entry test_variantbam -c ./tests/config/nextflow.config -c ./tests/modules/variantbam/nextflow.config tags: - variantbam files: diff --git a/tests/modules/vcftools/main.nf b/tests/modules/vcftools/main.nf index 2d4997de..21f9aa88 100644 --- a/tests/modules/vcftools/main.nf +++ 
b/tests/modules/vcftools/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { VCFTOOLS as VCFTOOLS_BASE } from '../../../modules/vcftools/main.nf' addParams( options: ['args': '--freq'] ) -include { VCFTOOLS as VCFTOOLS_OPTIONAL } from '../../../modules/vcftools/main.nf' addParams( options: ['args': '--freq --exclude-bed'] ) +include { VCFTOOLS as VCFTOOLS_BASE } from '../../../modules/vcftools/main.nf' +include { VCFTOOLS as VCFTOOLS_OPTIONAL } from '../../../modules/vcftools/main.nf' workflow test_vcftools_vcf_base { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/vcftools/nextflow.config b/tests/modules/vcftools/nextflow.config new file mode 100644 index 00000000..6865bbea --- /dev/null +++ b/tests/modules/vcftools/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: VCFTOOLS_BASE { + ext.args = '--freq' + } + + withName: VCFTOOLS_OPTIONAL { + ext.args = '--freq --exclude-bed' + } + +} diff --git a/tests/modules/vcftools/test.yml b/tests/modules/vcftools/test.yml index 81529be2..5314ea75 100644 --- a/tests/modules/vcftools/test.yml +++ b/tests/modules/vcftools/test.yml @@ -1,5 +1,5 @@ - name: vcftools test_vcftools_vcf_base - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcf_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcf_base -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -7,7 +7,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcfgz_base - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcfgz_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcfgz_base -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -15,7 +15,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcf_optional - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcf_optional -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcf_optional -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -23,7 +23,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcfgz_optional - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcfgz_optional -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcfgz_optional -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: diff --git a/tests/modules/yara/index/main.nf b/tests/modules/yara/index/main.nf index 35a86182..89eb0f7d 100644 --- a/tests/modules/yara/index/main.nf +++ b/tests/modules/yara/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams( options: [publish_dir:'yara'] ) +include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' workflow test_yara_index { diff --git a/tests/modules/yara/index/nextflow.config b/tests/modules/yara/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/yara/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff 
--git a/tests/modules/yara/index/test.yml b/tests/modules/yara/index/test.yml index de6f1cf6..a8d17866 100644 --- a/tests/modules/yara/index/test.yml +++ b/tests/modules/yara/index/test.yml @@ -1,5 +1,5 @@ - name: yara index test_yara_index - command: nextflow run tests/modules/yara/index -entry test_yara_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/index -entry test_yara_index -c ./tests/config/nextflow.config -c ./tests/modules/yara/index/nextflow.config tags: - yara/index - yara diff --git a/tests/modules/yara/mapper/main.nf b/tests/modules/yara/mapper/main.nf index 9cdce40d..18800eb3 100644 --- a/tests/modules/yara/mapper/main.nf +++ b/tests/modules/yara/mapper/main.nf @@ -3,15 +3,18 @@ nextflow.enable.dsl = 2 -include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams(options: ['args': '-e 3']) -include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' addParams(options: ['args': '-e 3']) +include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' +include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' workflow test_yara_single_end { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - YARA_INDEX ( fasta ) YARA_MAPPER ( input, YARA_INDEX.out.index ) @@ -19,12 +22,15 @@ workflow test_yara_single_end { workflow test_yara_paired_end { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - YARA_INDEX ( fasta ) YARA_MAPPER ( input, YARA_INDEX.out.index ) } diff --git a/tests/modules/yara/mapper/nextflow.config b/tests/modules/yara/mapper/nextflow.config new file mode 100644 index 00000000..a626a8fc --- /dev/null +++ b/tests/modules/yara/mapper/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: YARA_INDEX { + ext.args = '-e 3' + } + + withName: YARA_MAPPER { + ext.args = '-e 3' + } + +} diff --git a/tests/modules/yara/mapper/test.yml b/tests/modules/yara/mapper/test.yml index 51b056df..186f70b4 100644 --- a/tests/modules/yara/mapper/test.yml +++ b/tests/modules/yara/mapper/test.yml @@ -1,68 +1,68 @@ - name: yara mapper test_yara_single_end - command: nextflow run tests/modules/yara/mapper -entry test_yara_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/mapper -entry test_yara_single_end -c ./tests/config/nextflow.config -c ./tests/modules/yara/mapper/nextflow.config tags: - yara/mapper - yara files: - path: output/yara/test.mapped.bam - - path: output/index/yara/yara.txt.size + - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - - path: 
output/index/yara/yara.lf.drs + - path: output/yara/yara/yara.lf.drs md5sum: 55a54008ad1ba589aa210d2629c1df41 - - path: output/index/yara/yara.lf.pst + - path: output/yara/yara/yara.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/yara/yara.sa.len + - path: output/yara/yara/yara.sa.len md5sum: 45677f66c28c79c02250ceb8b58645e8 - - path: output/index/yara/yara.rid.concat + - path: output/yara/yara/yara.rid.concat md5sum: 1e4e4c88ddeaf907a12f02f0d88367c5 - - path: output/index/yara/yara.txt.concat + - path: output/yara/yara/yara.txt.concat md5sum: 6074d1933c9e7e5ab05fa0def5ce28c0 - - path: output/index/yara/yara.sa.val + - path: output/yara/yara/yara.sa.val md5sum: ce57cc82e2d3ae7b9824210f54168ce9 - - path: output/index/yara/yara.sa.ind + - path: output/yara/yara/yara.sa.ind md5sum: 464314583efb5f07260b0efecc29a1ce - - path: output/index/yara/yara.rid.limits + - path: output/yara/yara/yara.rid.limits md5sum: 8b814661f30a0c9e350bfbcb454930ce - - path: output/index/yara/yara.lf.drp + - path: output/yara/yara/yara.lf.drp md5sum: 3ef99a87a4e44513f46d42f4261f7842 - - path: output/index/yara/yara.txt.limits + - path: output/yara/yara/yara.txt.limits md5sum: 4480a068db603e4c9a27bc4fa9ceaf14 - - path: output/index/yara/yara.lf.drv + - path: output/yara/yara/yara.lf.drv md5sum: cf6408307fe9fd7f99c33f521bf95550 - - path: output/index/yara/yara.fasta + - path: output/yara/yara/yara.fasta md5sum: 6e9fe4042a72f2345f644f239272b7e6 - name: yara mapper test_yara_paired_end - command: nextflow run tests/modules/yara/mapper -entry test_yara_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/mapper -entry test_yara_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/yara/mapper/nextflow.config tags: - yara/mapper - yara files: - path: output/yara/test_2.mapped.bam - path: output/yara/test_1.mapped.bam - - path: output/index/yara/yara.txt.size + - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - - path: output/index/yara/yara.lf.drs + - path: output/yara/yara/yara.lf.drs md5sum: 55a54008ad1ba589aa210d2629c1df41 - - path: output/index/yara/yara.lf.pst + - path: output/yara/yara/yara.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/yara/yara.sa.len + - path: output/yara/yara/yara.sa.len md5sum: 45677f66c28c79c02250ceb8b58645e8 - - path: output/index/yara/yara.rid.concat + - path: output/yara/yara/yara.rid.concat md5sum: 1e4e4c88ddeaf907a12f02f0d88367c5 - - path: output/index/yara/yara.txt.concat + - path: output/yara/yara/yara.txt.concat md5sum: 6074d1933c9e7e5ab05fa0def5ce28c0 - - path: output/index/yara/yara.sa.val + - path: output/yara/yara/yara.sa.val md5sum: ce57cc82e2d3ae7b9824210f54168ce9 - - path: output/index/yara/yara.sa.ind + - path: output/yara/yara/yara.sa.ind md5sum: 464314583efb5f07260b0efecc29a1ce - - path: output/index/yara/yara.rid.limits + - path: output/yara/yara/yara.rid.limits md5sum: 8b814661f30a0c9e350bfbcb454930ce - - path: output/index/yara/yara.lf.drp + - path: output/yara/yara/yara.lf.drp md5sum: 3ef99a87a4e44513f46d42f4261f7842 - - path: output/index/yara/yara.txt.limits + - path: output/yara/yara/yara.txt.limits md5sum: 4480a068db603e4c9a27bc4fa9ceaf14 - - path: output/index/yara/yara.lf.drv + - path: output/yara/yara/yara.lf.drv md5sum: cf6408307fe9fd7f99c33f521bf95550 - - path: output/index/yara/yara.fasta + - path: output/yara/yara/yara.fasta md5sum: 6e9fe4042a72f2345f644f239272b7e6 diff --git a/tests/subworkflows/nf-core/align_bowtie2/test.yml 
b/tests/subworkflows/nf-core/align_bowtie2/test.yml index 07c0b1b4..beea38c2 100644 --- a/tests/subworkflows/nf-core/align_bowtie2/test.yml +++ b/tests/subworkflows/nf-core/align_bowtie2/test.yml @@ -1,9 +1,10 @@ - name: align bowtie2 single-end command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_single_end -c tests/config/nextflow.config tags: - - subworkflows/align_bowtie2 - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/align_bowtie2 + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules # - bowtie2 # - bowtie2/align @@ -42,9 +43,10 @@ - name: align bowtie2 paired-end command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_paired_end -c tests/config/nextflow.config tags: - - subworkflows/align_bowtie2 - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/align_bowtie2 + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules # - bowtie2 # - bowtie2/align diff --git a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml index b84735e5..7dc73c80 100644 --- a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml @@ -1,15 +1,16 @@ - name: bam sort samtools single-end command: nextflow run ./tests/subworkflows/nf-core/bam_sort_samtools -entry test_bam_sort_samtools_single_end -c tests/config/nextflow.config tags: - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam md5sum: 8b56bb7d26ced04112f712250d915aaa @@ -25,8 +26,9 @@ - name: bam sort samtools paired-end command: nextflow run ./tests/subworkflows/nf-core/bam_sort_samtools -entry test_bam_sort_samtools_paired_end -c tests/config/nextflow.config tags: - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules # - samtools # - samtools/index diff --git a/tests/subworkflows/nf-core/bam_stats_samtools/test.yml b/tests/subworkflows/nf-core/bam_stats_samtools/test.yml index d93c95a5..2b2e45d1 100644 --- a/tests/subworkflows/nf-core/bam_stats_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_stats_samtools/test.yml @@ -1,12 +1,13 @@ - name: bam stats samtools single-end command: nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools_single_end -c tests/config/nextflow.config tags: - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.single_end.sorted.bam.flagstat md5sum: 2191911d72575a2358b08b1df64ccb53 @@ -17,12 +18,13 @@ - name: bam stats samtools paired-end command: nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools_paired_end -c 
tests/config/nextflow.config tags: - - subworkflows/bam_stats_samtools - # Modules - - samtools - - samtools/stats - - samtools/idxstats - - samtools/flagstat + - subworkflows + # - subworkflows/bam_stats_samtools + # # Modules + # - samtools + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.paired_end.sorted.bam.flagstat md5sum: 4f7ffd1e6a5e85524d443209ac97d783 diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml index e6d80409..63cf64f8 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -1,8 +1,9 @@ - name: gatk_create_som_pon command: nextflow run ./tests/subworkflows/nf-core/gatk_create_som_pon -entry test_gatk_create_som_pon -c tests/config/nextflow.config tags: - - subworkflows/gatk_create_som_pon - - gatk4 + - subworkflows + # - subworkflows/gatk_create_som_pon + # - gatk4 # Modules # - gatk4/genomicsdbimport # - gatk4/createsomaticpanelofnormals diff --git a/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml index 3c6753fb..4b335065 100644 --- a/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml +++ b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml @@ -1,7 +1,8 @@ - name: gatk_tumor_normal_somatic_variant_calling command: nextflow run ./tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling -entry test_gatk_tumor_normal_somatic_variant_calling -c tests/config/nextflow.config tags: - - subworkflows/gatk_tumor_normal_somatic_variant_calling + - subworkflows + # - subworkflows/gatk_tumor_normal_somatic_variant_calling # Modules # - gatk4/mutect2 # - gatk4/learnreadorientationmodel diff --git a/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml index 797ae936..9d2d5c10 100644 --- a/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml +++ b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml @@ -1,7 +1,8 @@ - name: gatk_tumor_only_somatic_variant_calling command: nextflow run ./tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling -entry test_gatk_tumor_only_somatic_variant_calling -c tests/config/nextflow.config tags: - - subworkflows/gatk_tumor_only_somatic_variant_calling + - subworkflows + # - subworkflows/gatk_tumor_only_somatic_variant_calling # Modules # - gatk4/mutect2 # - gatk4/getpileupsummaries diff --git a/tests/subworkflows/nf-core/sra_fastq/test.yml b/tests/subworkflows/nf-core/sra_fastq/test.yml index 6f953ccf..4b75431f 100644 --- a/tests/subworkflows/nf-core/sra_fastq/test.yml +++ b/tests/subworkflows/nf-core/sra_fastq/test.yml @@ -1,11 +1,12 @@ - name: sra fastq single-end command: nextflow run ./tests/subworkflows/nf-core/sra_fastq -entry test_sra_fastq_single_end -c tests/config/nextflow.config tags: - - subworkflows/sra_fastq + - subworkflows + # - subworkflows/sra_fastq # Modules - - sratools - - sratools/prefetch - - sratools/fasterqdump + # - sratools + # - sratools/prefetch + # - sratools/fasterqdump files: - path: output/sratools/SRR13255544.fastq.gz md5sum: 1054c7b71884acdb5eed8a378f18be82 @@ -13,11 +14,12 @@ - name: sra fastq paired-end command: nextflow run ./tests/subworkflows/nf-core/sra_fastq -entry 
test_sra_fastq_paired_end -c tests/config/nextflow.config tags: - - subworkflows/sra_fastq + - subworkflows + # - subworkflows/sra_fastq # Modules - - sratools - - sratools/prefetch - - sratools/fasterqdump + # - sratools + # - sratools/prefetch + # - sratools/fasterqdump files: - path: output/sratools/SRR11140744_1.fastq.gz md5sum: 193809c784a4ea132ab2a253fa4f55b6 From 1f8f86b79361fb9999b56680105f439df81d718d Mon Sep 17 00:00:00 2001 From: louperelo <44900284+louperelo@users.noreply.github.com> Date: Mon, 29 Nov 2021 14:32:23 +0100 Subject: [PATCH 278/314] takes gunzip out of fargene main.nf (#1090) * takes gunzip out of fargene main.nf * update definition of input * options.args -> args Co-authored-by: James A. Fellows Yates --- modules/fargene/main.nf | 15 ++++++--------- modules/fargene/meta.yml | 2 +- tests/modules/fargene/main.nf | 4 +++- tests/modules/fargene/test.yml | 8 +++++--- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf index 5bf1c604..ac3f8338 100644 --- a/modules/fargene/main.nf +++ b/modules/fargene/main.nf @@ -35,15 +35,12 @@ process FARGENE { def args = task.ext.args ?: '' prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - gzip \\ - -cdf $input \\ - > unziped.fa | - fargene \\ - $args \\ - -p $task.cpus \\ - -i unziped.fa \\ - --hmm-model $hmm_model \\ - -o $prefix + fargene \\ + $args \\ + -p $task.cpus \\ + -i $input \\ + --hmm-model $hmm_model \\ + -o $prefix cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/fargene/meta.yml b/modules/fargene/meta.yml index 98ec12bb..35e98008 100644 --- a/modules/fargene/meta.yml +++ b/modules/fargene/meta.yml @@ -23,7 +23,7 @@ input: e.g. [ id:'test', single_end:false ] - input: type: file - description: fasta or paired-end fastq file containing either genomes or longer contigs as nucleotide or protein sequences (fasta) or fragmented metagenomic reads (fastq) + description: uncompressed fasta file or paired-end fastq files containing either genomes or longer contigs as nucleotide or protein sequences (fasta) or fragmented metagenomic reads (fastq) pattern: "*.{fasta}" - hmm_model: type: string diff --git a/tests/modules/fargene/main.nf b/tests/modules/fargene/main.nf index 6600015b..471862e1 100644 --- a/tests/modules/fargene/main.nf +++ b/tests/modules/fargene/main.nf @@ -2,6 +2,7 @@ nextflow.enable.dsl = 2 +include { GUNZIP } from '../../../modules/gunzip/main.nf' include { FARGENE } from '../../../modules/fargene/main.nf' workflow test_fargene { @@ -10,5 +11,6 @@ workflow test_fargene { file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true) ] hmm_model = 'class_a' - FARGENE ( input, hmm_model ) + GUNZIP ( input ) + FARGENE ( GUNZIP.out.gunzip, hmm_model ) } diff --git a/tests/modules/fargene/test.yml b/tests/modules/fargene/test.yml index 622e44b0..d97e2257 100644 --- a/tests/modules/fargene/test.yml +++ b/tests/modules/fargene/test.yml @@ -4,9 +4,11 @@ - fargene files: - path: output/fargene/fargene_analysis.log - - path: output/fargene/test/hmmsearchresults/unziped-class_A-hmmsearched.out + - path: output/fargene/test/hmmsearchresults/test1.contigs-class_A-hmmsearched.out - path: output/fargene/test/results_summary.txt md5sum: 690d351cfc52577263ef4cfab1c81f50 - - path: output/fargene/test/tmpdir/tmp.out - - path: output/fargene/test/tmpdir/unziped-positives.out + - path: output/fargene/test/tmpdir/test1.contigs-positives.out md5sum: 
d41d8cd98f00b204e9800998ecf8427e + - path: output/fargene/test/tmpdir/tmp.out + - path: output/gunzip/test1.contigs.fa + md5sum: 80c4d78f2810f6d9e90fa6da9bb9c4f9 From 9d0cad583b9a71a6509b754fdf589cbfbed08961 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Thu, 2 Dec 2021 12:39:55 +0000 Subject: [PATCH 279/314] Change syntax from task.ext.suffix to task.ext.prefix in all modules (#1110) * Add comment line for consistency * Remove all functions.nf * Remove include functions.nf and publishDir options * Replace options.args3 with task.ext.args3 - 3 modules * Replace options.args3 with task.ext.args3 - 17 modules * Replace {task.cpus} with task.cpus * Add def args = task.ext.args line to all modules in script section * Replace options.args with args and args_list * Initialise args2 and args3 properly * Replace container syntax * Revert container changes for cellranger/mkref * Replace getProcessName in all modules * Replace getSoftwareName in all modules * Unify modules using VERSION variable * Replace options.suffix with task.ext.suffix * Remove NF version restriction for CI * Bump NF version in README * Replace task.process.tokenize logic with task.process * Minor tweaks to unify syntax in tests main.nf * Add a separate nextflow.config for each module * Transfer remaining module options to nextflow.config * Remove addParams from tests main.nf * Remove TODO statements * Use -c to import module specific config * Bump NF version to 21.10.3 * Fix tests for artic/minion * Fix broken publishDir syntax * Standardise and fix obvious failing module tests * Remove kronatools to krona * Comment out tags in subworkflow test.yml * Fix failing module tests * Add consistent indentation to nextflow.config * Comment out subworkflow definitions * Fix kallistobustools/ref * Fix rmarkdownnotebook * Fix jupyternotebook * Quote task.process * Add plink2/vcf to pytest_modules.yml * Remove NF_CORE_MODULES_TEST from pytest CI * Fix more tests * Move bacteroides_fragilis to prokaryotes folder * Fix cooler merge tests * Fix kallistobustools/count tests * Fix kallistobustools/ref tests * Update test_10x_1_fastq_gz file for kallistobustools/count tests * Fix bcftools/query tests * Fix delly/call tests * Fix cooler/zoomify tests * Fix csvtk/split tests * Fix gatk4/intervallisttools tests * Fix gatk4/variantfiltration * Fix pydamage/filter tests * Fix test data for unicycler * Fix gstama/collapse module * Fix leehom tests * Fix metaphlan3 tests * Fix pairtools/select tests * Update nextflow.config * Update nextflow.config * feat: update syntax * Fix arriba tests * Fix more failing tests * Update test syntax * Remove comments from tests nextflow.config * Apply suggestions from code review * Fix kallistobustools/count module * Update dumpsoftwareversions module * Update custom/dumpsoftwareversions * Add args2 to untar
module * Update leftover modules * Remove last remaining addParams * Change syntax from task.ext.suffix to tast.ext.prefix * Change nextflow.config in all tests to use ext.prefix instead of ext.suffix Co-authored-by: JoseEspinosa Co-authored-by: Gregor Sturm Co-authored-by: MaxUlysse --- modules/abacas/main.nf | 2 +- modules/adapterremoval/main.nf | 2 +- modules/agrvate/main.nf | 2 +- modules/allelecounter/main.nf | 2 +- modules/arriba/main.nf | 2 +- modules/artic/guppyplex/main.nf | 2 +- modules/assemblyscan/main.nf | 2 +- modules/ataqv/ataqv/main.nf | 2 +- modules/bamaligncleaner/main.nf | 2 +- modules/bamtools/split/main.nf | 2 +- modules/bamutil/trimbam/main.nf | 2 +- modules/bandage/image/main.nf | 2 +- modules/bbmap/align/main.nf | 2 +- modules/bbmap/bbduk/main.nf | 2 +- modules/bbmap/bbsplit/main.nf | 2 +- modules/bcftools/consensus/main.nf | 2 +- modules/bcftools/filter/main.nf | 2 +- modules/bcftools/index/main.nf | 2 +- modules/bcftools/mpileup/main.nf | 2 +- modules/bcftools/norm/main.nf | 2 +- modules/bcftools/query/main.nf | 2 +- modules/bcftools/reheader/main.nf | 2 +- modules/bcftools/stats/main.nf | 2 +- modules/bcftools/view/main.nf | 2 +- modules/bedtools/bamtobed/main.nf | 2 +- modules/bedtools/complement/main.nf | 2 +- modules/bedtools/genomecov/main.nf | 2 +- modules/bedtools/intersect/main.nf | 2 +- modules/bedtools/makewindows/main.nf | 2 +- modules/bedtools/maskfasta/main.nf | 2 +- modules/bedtools/merge/main.nf | 2 +- modules/bedtools/slop/main.nf | 2 +- modules/bedtools/sort/main.nf | 2 +- modules/bedtools/subtract/main.nf | 2 +- modules/bismark/align/main.nf | 2 +- modules/bismark/deduplicate/main.nf | 2 +- modules/blast/blastn/main.nf | 2 +- modules/bowtie/align/main.nf | 2 +- modules/bowtie2/align/main.nf | 2 +- modules/bwa/aln/main.nf | 2 +- modules/bwa/mem/main.nf | 2 +- modules/bwa/sampe/main.nf | 2 +- modules/bwa/samse/main.nf | 2 +- modules/bwamem2/mem/main.nf | 2 +- modules/bwameth/align/main.nf | 2 +- modules/cat/fastq/main.nf | 2 +- modules/chromap/chromap/main.nf | 2 +- modules/clonalframeml/main.nf | 2 +- modules/cmseq/polymut/main.nf | 2 +- modules/cooler/cload/main.nf | 2 +- modules/cooler/dump/main.nf | 2 +- modules/cooler/merge/main.nf | 2 +- modules/cooler/zoomify/main.nf | 2 +- modules/csvtk/split/main.nf | 2 +- modules/cutadapt/main.nf | 2 +- modules/dastool/dastool/main.nf | 2 +- modules/dastool/scaffolds2bin/main.nf | 2 +- modules/deeptools/computematrix/main.nf | 2 +- modules/deeptools/plotfingerprint/main.nf | 2 +- modules/deeptools/plotheatmap/main.nf | 2 +- modules/deeptools/plotprofile/main.nf | 2 +- modules/delly/call/main.nf | 2 +- modules/diamond/blastp/main.nf | 2 +- modules/diamond/blastx/main.nf | 2 +- modules/dshbio/exportsegments/main.nf | 2 +- modules/dshbio/filterbed/main.nf | 2 +- modules/dshbio/filtergff3/main.nf | 2 +- modules/dshbio/splitbed/main.nf | 2 +- modules/dshbio/splitgff3/main.nf | 2 +- modules/ectyper/main.nf | 2 +- modules/emmtyper/main.nf | 2 +- modules/ensemblvep/main.nf | 2 +- modules/expansionhunter/main.nf | 2 +- modules/fastani/main.nf | 2 +- modules/fastp/main.nf | 2 +- modules/fastqc/main.nf | 2 +- modules/fastqscan/main.nf | 2 +- modules/fgbio/callmolecularconsensusreads/main.nf | 2 +- modules/fgbio/fastqtobam/main.nf | 2 +- modules/fgbio/groupreadsbyumi/main.nf | 2 +- modules/fgbio/sortbam/main.nf | 2 +- modules/filtlong/main.nf | 2 +- modules/flash/main.nf | 2 +- modules/freebayes/main.nf | 2 +- modules/gatk4/applybqsr/main.nf | 2 +- modules/gatk4/baserecalibrator/main.nf | 2 +- 
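For readers following the task.ext.prefix change described in the commit message above, here is a minimal sketch of how the new hook is driven from configuration. The SAMTOOLS_SORT selector and the '.sorted' value are illustrative choices and are not taken from this patch.

// illustrative per-module configuration (not part of this patch)
process {
    withName: SAMTOOLS_SORT {
        ext.prefix = { "${meta.id}.sorted" }   // closure, evaluated per task with that task's meta map in scope
    }
}

// inside the module's script block the value is read with a fallback:
def prefix = task.ext.prefix ?: "${meta.id}"   // yields 'test.sorted' here, or plain 'test' when nothing is configured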
modules/gatk4/bedtointervallist/main.nf | 2 +- modules/gatk4/calculatecontamination/main.nf | 2 +- modules/gatk4/createsomaticpanelofnormals/main.nf | 2 +- modules/gatk4/estimatelibrarycomplexity/main.nf | 2 +- modules/gatk4/fastqtosam/main.nf | 2 +- modules/gatk4/filtermutectcalls/main.nf | 2 +- modules/gatk4/genotypegvcfs/main.nf | 2 +- modules/gatk4/getpileupsummaries/main.nf | 2 +- modules/gatk4/haplotypecaller/main.nf | 2 +- modules/gatk4/intervallisttools/main.nf | 2 +- modules/gatk4/learnreadorientationmodel/main.nf | 2 +- modules/gatk4/markduplicates/main.nf | 2 +- modules/gatk4/mergebamalignment/main.nf | 2 +- modules/gatk4/mergevcfs/main.nf | 2 +- modules/gatk4/mutect2/main.nf | 2 +- modules/gatk4/revertsam/main.nf | 2 +- modules/gatk4/samtofastq/main.nf | 2 +- modules/gatk4/splitncigarreads/main.nf | 2 +- modules/gatk4/variantfiltration/main.nf | 2 +- modules/genrich/main.nf | 2 +- modules/glnexus/main.nf | 2 +- modules/graphmap2/align/main.nf | 2 +- modules/gstama/collapse/main.nf | 2 +- modules/gstama/merge/main.nf | 2 +- modules/gunc/run/main.nf | 2 +- modules/hicap/main.nf | 2 +- modules/hifiasm/main.nf | 2 +- modules/hisat2/align/main.nf | 2 +- modules/hmmcopy/readcounter/main.nf | 2 +- modules/hmmer/hmmalign/main.nf | 2 +- modules/homer/annotatepeaks/main.nf | 2 +- modules/homer/findpeaks/main.nf | 2 +- modules/homer/maketagdirectory/main.nf | 2 +- modules/homer/makeucscfile/main.nf | 2 +- modules/imputeme/vcftoprs/main.nf | 2 +- modules/ismapper/main.nf | 2 +- modules/isoseq3/cluster/main.nf | 2 +- modules/isoseq3/refine/main.nf | 2 +- modules/ivar/consensus/main.nf | 2 +- modules/ivar/trim/main.nf | 2 +- modules/ivar/variants/main.nf | 2 +- modules/jupyternotebook/main.nf | 2 +- modules/kallistobustools/count/main.nf | 2 +- modules/kleborate/main.nf | 2 +- modules/kraken2/kraken2/main.nf | 2 +- modules/last/dotplot/main.nf | 2 +- modules/last/lastal/main.nf | 2 +- modules/last/lastdb/main.nf | 2 +- modules/last/mafconvert/main.nf | 2 +- modules/last/mafswap/main.nf | 2 +- modules/last/postmask/main.nf | 2 +- modules/last/split/main.nf | 2 +- modules/last/train/main.nf | 2 +- modules/lima/main.nf | 2 +- modules/lissero/main.nf | 2 +- modules/lofreq/call/main.nf | 2 +- modules/lofreq/callparallel/main.nf | 2 +- modules/lofreq/filter/main.nf | 2 +- modules/lofreq/indelqual/main.nf | 2 +- modules/macs2/callpeak/main.nf | 2 +- modules/manta/germline/main.nf | 2 +- modules/manta/somatic/main.nf | 2 +- modules/manta/tumoronly/main.nf | 2 +- modules/mapdamage2/main.nf | 2 +- modules/mash/sketch/main.nf | 2 +- modules/mashtree/main.nf | 2 +- modules/maxbin2/main.nf | 2 +- modules/medaka/main.nf | 2 +- modules/megahit/main.nf | 2 +- modules/meningotype/main.nf | 2 +- modules/metabat2/jgisummarizebamcontigdepths/main.nf | 2 +- modules/metabat2/metabat2/main.nf | 2 +- modules/metaphlan3/main.nf | 2 +- modules/methyldackel/mbias/main.nf | 2 +- modules/minia/main.nf | 2 +- modules/miniasm/main.nf | 2 +- modules/minimap2/align/main.nf | 2 +- modules/mlst/main.nf | 2 +- modules/mosdepth/main.nf | 2 +- modules/msisensor/scan/main.nf | 2 +- modules/mtnucratio/main.nf | 2 +- modules/mummer/main.nf | 2 +- modules/muscle/main.nf | 2 +- modules/nanolyse/main.nf | 2 +- modules/ncbigenomedownload/main.nf | 2 +- modules/ngmaster/main.nf | 2 +- modules/nucmer/main.nf | 2 +- modules/pairtools/dedup/main.nf | 2 +- modules/pairtools/flip/main.nf | 2 +- modules/pairtools/parse/main.nf | 2 +- modules/pairtools/restrict/main.nf | 2 +- modules/pairtools/select/main.nf | 2 +- 
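The per-module test configs added in this series (for example tests/modules/yara/mapper/nextflow.config earlier in the document) also derive the publish directory from the process name, which is why the expected paths in the yara test.yml move from output/index/ to output/yara/. A short worked evaluation of that publishDir closure, assuming a fully qualified process name of test_yara_single_end:YARA_MAPPER:

// assumed fully qualified name when the module runs inside the test entry workflow
def name = 'test_yara_single_end:YARA_MAPPER'
assert name.tokenize(':')[-1] == 'YARA_MAPPER'                          // last component: the process itself
assert name.tokenize(':')[-1].tokenize('_')[0] == 'YARA'                // first token of the process name
assert name.tokenize(':')[-1].tokenize('_')[0].toLowerCase() == 'yara'
// so publishDir resolves to "${params.outdir}/yara" for both YARA_INDEX and YARA_MAPPER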
modules/pairtools/sort/main.nf | 2 +- modules/pangolin/main.nf | 2 +- modules/paraclu/main.nf | 2 +- modules/pbbam/pbmerge/main.nf | 2 +- modules/pbccs/main.nf | 2 +- modules/peddy/main.nf | 2 +- modules/phantompeakqualtools/main.nf | 2 +- modules/phyloflash/main.nf | 4 ++-- modules/picard/collecthsmetrics/main.nf | 2 +- modules/picard/collectmultiplemetrics/main.nf | 2 +- modules/picard/collectwgsmetrics/main.nf | 2 +- modules/picard/filtersamreads/main.nf | 2 +- modules/picard/markduplicates/main.nf | 2 +- modules/picard/mergesamfiles/main.nf | 2 +- modules/picard/sortsam/main.nf | 2 +- modules/pirate/main.nf | 2 +- modules/plink/extract/main.nf | 2 +- modules/plink/vcf/main.nf | 2 +- modules/plink2/vcf/main.nf | 2 +- modules/pmdtools/filter/main.nf | 2 +- modules/porechop/main.nf | 2 +- modules/preseq/lcextrap/main.nf | 2 +- modules/pydamage/analyze/main.nf | 2 +- modules/pydamage/filter/main.nf | 2 +- modules/qcat/main.nf | 2 +- modules/racon/main.nf | 2 +- modules/rasusa/main.nf | 2 +- modules/rmarkdownnotebook/main.nf | 2 +- modules/roary/main.nf | 2 +- modules/rseqc/bamstat/main.nf | 2 +- modules/rseqc/inferexperiment/main.nf | 2 +- modules/rseqc/innerdistance/main.nf | 2 +- modules/rseqc/junctionannotation/main.nf | 2 +- modules/rseqc/junctionsaturation/main.nf | 2 +- modules/rseqc/readdistribution/main.nf | 2 +- modules/rseqc/readduplication/main.nf | 2 +- modules/samblaster/main.nf | 2 +- modules/samtools/ampliconclip/main.nf | 2 +- modules/samtools/bam2fq/main.nf | 2 +- modules/samtools/depth/main.nf | 2 +- modules/samtools/fastq/main.nf | 2 +- modules/samtools/fixmate/main.nf | 2 +- modules/samtools/mpileup/main.nf | 2 +- modules/samtools/sort/main.nf | 2 +- modules/samtools/view/main.nf | 2 +- modules/scoary/main.nf | 2 +- modules/seacr/callpeak/main.nf | 2 +- modules/seqsero2/main.nf | 2 +- modules/seqtk/mergepe/main.nf | 2 +- modules/seqtk/sample/main.nf | 2 +- modules/sequenzautils/bam2seqz/main.nf | 2 +- modules/sequenzautils/gcwiggle/main.nf | 2 +- modules/seqwish/induce/main.nf | 2 +- modules/snpdists/main.nf | 2 +- modules/snpeff/main.nf | 2 +- modules/sortmerna/main.nf | 2 +- modules/spades/main.nf | 2 +- modules/spatyper/main.nf | 2 +- modules/staphopiasccmec/main.nf | 2 +- modules/star/align/main.nf | 2 +- modules/strelka/germline/main.nf | 2 +- modules/strelka/somatic/main.nf | 2 +- modules/stringtie/stringtie/main.nf | 2 +- modules/subread/featurecounts/main.nf | 2 +- modules/tabix/bgzip/main.nf | 2 +- modules/tabix/bgziptabix/main.nf | 2 +- modules/tiddit/cov/main.nf | 2 +- modules/tiddit/sv/main.nf | 2 +- modules/trimgalore/main.nf | 2 +- modules/ucsc/bed12tobigbed/main.nf | 2 +- modules/ucsc/bedclip/main.nf | 2 +- modules/ucsc/bedgraphtobigwig/main.nf | 2 +- modules/ucsc/bigwigaverageoverbed/main.nf | 2 +- modules/ucsc/liftover/main.nf | 2 +- modules/ultra/pipeline/main.nf | 2 +- modules/umitools/dedup/main.nf | 2 +- modules/umitools/extract/main.nf | 2 +- modules/unicycler/main.nf | 2 +- modules/variantbam/main.nf | 2 +- modules/vcftools/main.nf | 2 +- modules/yara/mapper/main.nf | 2 +- tests/modules/bbmap/bbduk/nextflow.config | 2 +- tests/modules/bcftools/reheader/nextflow.config | 2 +- tests/modules/bedtools/complement/nextflow.config | 2 +- tests/modules/bedtools/genomecov/nextflow.config | 2 +- tests/modules/bedtools/intersect/nextflow.config | 2 +- tests/modules/bedtools/merge/nextflow.config | 2 +- tests/modules/bedtools/slop/nextflow.config | 2 +- tests/modules/bedtools/sort/nextflow.config | 2 +- tests/modules/diamond/blastp/nextflow.config | 2 +- 
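The "Remove addParams from tests main.nf" step listed above is the flip side of the ext.args selectors: tool arguments no longer travel through include statements. A condensed before/after sketch, based on the yara test shown earlier in this document:

// before: arguments were bound at include time
// include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' addParams(options: ['args': '-e 3'])

// after: a plain include ...
include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf'

// ... with the arguments supplied by the module-level test config, loaded via an extra -c flag:
// process { withName: YARA_MAPPER { ext.args = '-e 3' } }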
tests/modules/diamond/blastx/nextflow.config | 2 +- tests/modules/dshbio/filterbed/nextflow.config | 2 +- tests/modules/dshbio/filtergff3/nextflow.config | 2 +- tests/modules/dshbio/splitbed/nextflow.config | 2 +- tests/modules/dshbio/splitgff3/nextflow.config | 2 +- .../modules/fgbio/callmolecularconsensusreads/nextflow.config | 4 ++-- .../modules/gatk4/createsomaticpanelofnormals/nextflow.config | 2 +- tests/modules/gatk4/filtermutectcalls/nextflow.config | 2 +- tests/modules/gatk4/genotypegvcfs/nextflow.config | 2 +- tests/modules/gatk4/learnreadorientationmodel/nextflow.config | 2 +- tests/modules/gatk4/variantfiltration/nextflow.config | 2 +- tests/modules/gffread/nextflow.config | 2 +- tests/modules/gstama/collapse/nextflow.config | 2 +- tests/modules/gstama/merge/nextflow.config | 2 +- tests/modules/isoseq3/refine/nextflow.config | 2 +- tests/modules/last/postmask/nextflow.config | 2 +- tests/modules/last/split/nextflow.config | 2 +- tests/modules/lima/nextflow.config | 2 +- tests/modules/lofreq/indelqual/nextflow.config | 2 +- tests/modules/medaka/nextflow.config | 2 +- tests/modules/metaphlan3/nextflow.config | 2 +- tests/modules/miniasm/nextflow.config | 2 +- tests/modules/nanolyse/nextflow.config | 2 +- tests/modules/pairtools/dedup/nextflow.config | 2 +- tests/modules/pairtools/parse/nextflow.config | 2 +- tests/modules/pairtools/restrict/nextflow.config | 2 +- tests/modules/pairtools/sort/nextflow.config | 2 +- tests/modules/pbbam/pbmerge/nextflow.config | 2 +- tests/modules/picard/filtersamreads/nextflow.config | 4 ++-- tests/modules/picard/sortsam/nextflow.config | 2 +- tests/modules/plink/extract/nextflow.config | 2 +- tests/modules/porechop/nextflow.config | 2 +- tests/modules/rasusa/nextflow.config | 2 +- tests/modules/samblaster/nextflow.config | 2 +- tests/modules/samtools/merge/nextflow.config | 2 +- tests/modules/samtools/sort/nextflow.config | 2 +- tests/modules/seqtk/mergepe/nextflow.config | 2 +- tests/modules/seqtk/sample/nextflow.config | 2 +- tests/modules/seqtk/subseq/nextflow.config | 2 +- tests/modules/ucsc/bedclip/nextflow.config | 2 +- tests/modules/ultra/pipeline/nextflow.config | 2 +- 309 files changed, 312 insertions(+), 312 deletions(-) diff --git a/modules/abacas/main.nf b/modules/abacas/main.nf index 7fe71e3a..49040214 100644 --- a/modules/abacas/main.nf +++ b/modules/abacas/main.nf @@ -17,7 +17,7 @@ process ABACAS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ abacas.pl \\ -r $fasta \\ diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index 33955ed2..0cf257ff 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -17,7 +17,7 @@ process ADAPTERREMOVAL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index 06392e16..aff72abc 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -17,7 +17,7 @@ process AGRVATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ agrvate \\ $args \\ diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index 5cbc4cbd..850a018f 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -18,7 +18,7 @@ process ALLELECOUNTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference_options = fasta ? "-r $fasta": "" """ diff --git a/modules/arriba/main.nf b/modules/arriba/main.nf index 459ff100..0fcb6ba7 100644 --- a/modules/arriba/main.nf +++ b/modules/arriba/main.nf @@ -19,7 +19,7 @@ process ARRIBA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def blacklist = (args.contains('-b')) ? '' : '-f blacklist' """ arriba \\ diff --git a/modules/artic/guppyplex/main.nf b/modules/artic/guppyplex/main.nf index a69e5381..780f5111 100644 --- a/modules/artic/guppyplex/main.nf +++ b/modules/artic/guppyplex/main.nf @@ -16,7 +16,7 @@ process ARTIC_GUPPYPLEX { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ artic \\ guppyplex \\ diff --git a/modules/assemblyscan/main.nf b/modules/assemblyscan/main.nf index 7b5b752b..56541222 100644 --- a/modules/assemblyscan/main.nf +++ b/modules/assemblyscan/main.nf @@ -16,7 +16,7 @@ process ASSEMBLYSCAN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ assembly-scan $assembly > ${prefix}.json diff --git a/modules/ataqv/ataqv/main.nf b/modules/ataqv/ataqv/main.nf index 39602d30..20525e85 100644 --- a/modules/ataqv/ataqv/main.nf +++ b/modules/ataqv/ataqv/main.nf @@ -21,7 +21,7 @@ process ATAQV_ATAQV { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def peak = peak_file ? "--peak-file $peak_file" : '' def tss = tss_file ? "--tss-file $tss_file" : '' def excl_regs = excl_regs_file ? "--excluded-region-file $excl_regs_file" : '' diff --git a/modules/bamaligncleaner/main.nf b/modules/bamaligncleaner/main.nf index f1481c39..88fe21aa 100644 --- a/modules/bamaligncleaner/main.nf +++ b/modules/bamaligncleaner/main.nf @@ -16,7 +16,7 @@ process BAMALIGNCLEANER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bamAlignCleaner \\ diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 676aab6f..8d5e5690 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -16,7 +16,7 @@ process BAMTOOLS_SPLIT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bamtools \\ split \\ diff --git a/modules/bamutil/trimbam/main.nf b/modules/bamutil/trimbam/main.nf index a210fe5f..9ceb2b65 100644 --- a/modules/bamutil/trimbam/main.nf +++ b/modules/bamutil/trimbam/main.nf @@ -16,7 +16,7 @@ process BAMUTIL_TRIMBAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bam \\ trimBam \\ diff --git a/modules/bandage/image/main.nf b/modules/bandage/image/main.nf index e31566d1..bc2a9495 100644 --- a/modules/bandage/image/main.nf +++ b/modules/bandage/image/main.nf @@ -17,7 +17,7 @@ process BANDAGE_IMAGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ Bandage image $gfa ${prefix}.png $args Bandage image $gfa ${prefix}.svg $args diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index ef23fada..ac839497 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -18,7 +18,7 @@ process BBMAP_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" input = meta.single_end ? "in=${fastq}" : "in=${fastq[0]} in2=${fastq[1]}" diff --git a/modules/bbmap/bbduk/main.nf b/modules/bbmap/bbduk/main.nf index 98a21eab..79c3c306 100644 --- a/modules/bbmap/bbduk/main.nf +++ b/modules/bbmap/bbduk/main.nf @@ -18,7 +18,7 @@ process BBMAP_BBDUK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def raw = meta.single_end ? "in=${reads[0]}" : "in1=${reads[0]} in2=${reads[1]}" def trimmed = meta.single_end ? "out=${prefix}.fastq.gz" : "out1=${prefix}_1.fastq.gz out2=${prefix}_2.fastq.gz" def contaminants_fa = contaminants ? "ref=$contaminants" : '' diff --git a/modules/bbmap/bbsplit/main.nf b/modules/bbmap/bbsplit/main.nf index 53f6b1aa..0c916dfe 100644 --- a/modules/bbmap/bbsplit/main.nf +++ b/modules/bbmap/bbsplit/main.nf @@ -22,7 +22,7 @@ process BBMAP_BBSPLIT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { diff --git a/modules/bcftools/consensus/main.nf b/modules/bcftools/consensus/main.nf index 9b9384a6..4633790e 100644 --- a/modules/bcftools/consensus/main.nf +++ b/modules/bcftools/consensus/main.nf @@ -16,7 +16,7 @@ process BCFTOOLS_CONSENSUS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ cat $fasta | bcftools consensus $vcf $args > ${prefix}.fa header=\$(head -n 1 ${prefix}.fa | sed 's/>//g') diff --git a/modules/bcftools/filter/main.nf b/modules/bcftools/filter/main.nf index 87ad3183..95e0249a 100644 --- a/modules/bcftools/filter/main.nf +++ b/modules/bcftools/filter/main.nf @@ -16,7 +16,7 @@ process BCFTOOLS_FILTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools filter \\ --output ${prefix}.vcf.gz \\ diff --git a/modules/bcftools/index/main.nf b/modules/bcftools/index/main.nf index 8f40c683..0cdebf31 100644 --- a/modules/bcftools/index/main.nf +++ b/modules/bcftools/index/main.nf @@ -17,7 +17,7 @@ process BCFTOOLS_INDEX { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools \\ diff --git a/modules/bcftools/mpileup/main.nf b/modules/bcftools/mpileup/main.nf index 9d91193c..8a209a66 100644 --- a/modules/bcftools/mpileup/main.nf +++ b/modules/bcftools/mpileup/main.nf @@ -21,7 +21,7 @@ process BCFTOOLS_MPILEUP { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def args3 = task.ext.args3 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ echo "${meta.id}" > sample_name.list diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf index 79ab36e0..95da56db 100644 --- a/modules/bcftools/norm/main.nf +++ b/modules/bcftools/norm/main.nf @@ -17,7 +17,7 @@ process BCFTOOLS_NORM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools norm \\ --fasta-ref ${fasta} \\ diff --git a/modules/bcftools/query/main.nf b/modules/bcftools/query/main.nf index 1919fa76..d1098f99 100644 --- a/modules/bcftools/query/main.nf +++ b/modules/bcftools/query/main.nf @@ -19,7 +19,7 @@ process BCFTOOLS_QUERY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? "--samples-file ${samples}" : "" diff --git a/modules/bcftools/reheader/main.nf b/modules/bcftools/reheader/main.nf index 3cbe2d8f..018431a9 100644 --- a/modules/bcftools/reheader/main.nf +++ b/modules/bcftools/reheader/main.nf @@ -18,7 +18,7 @@ process BCFTOOLS_REHEADER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def update_sequences = fai ? "-f $fai" : "" def new_header = header ? "-h $header" : "" """ diff --git a/modules/bcftools/stats/main.nf b/modules/bcftools/stats/main.nf index c66f4453..67e8dca7 100644 --- a/modules/bcftools/stats/main.nf +++ b/modules/bcftools/stats/main.nf @@ -16,7 +16,7 @@ process BCFTOOLS_STATS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools stats $args $vcf > ${prefix}.bcftools_stats.txt cat <<-END_VERSIONS > versions.yml diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf index b2cbb580..f37c1ab9 100644 --- a/modules/bcftools/view/main.nf +++ b/modules/bcftools/view/main.nf @@ -19,7 +19,7 @@ process BCFTOOLS_VIEW { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? "--samples-file ${samples}" : "" diff --git a/modules/bedtools/bamtobed/main.nf b/modules/bedtools/bamtobed/main.nf index aebf7339..98d9ea2f 100644 --- a/modules/bedtools/bamtobed/main.nf +++ b/modules/bedtools/bamtobed/main.nf @@ -16,7 +16,7 @@ process BEDTOOLS_BAMTOBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ bamtobed \\ diff --git a/modules/bedtools/complement/main.nf b/modules/bedtools/complement/main.nf index df44b5bc..3146827c 100644 --- a/modules/bedtools/complement/main.nf +++ b/modules/bedtools/complement/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_COMPLEMENT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ complement \\ diff --git a/modules/bedtools/genomecov/main.nf b/modules/bedtools/genomecov/main.nf index e2a74ed3..ca491e75 100644 --- a/modules/bedtools/genomecov/main.nf +++ b/modules/bedtools/genomecov/main.nf @@ -24,7 +24,7 @@ process BEDTOOLS_GENOMECOV { args += " -bg" } - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (intervals.name =~ /\.bam/) { """ bedtools \\ diff --git a/modules/bedtools/intersect/main.nf b/modules/bedtools/intersect/main.nf index e01c78ac..afb0d056 100644 --- a/modules/bedtools/intersect/main.nf +++ b/modules/bedtools/intersect/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_INTERSECT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ intersect \\ diff --git a/modules/bedtools/makewindows/main.nf b/modules/bedtools/makewindows/main.nf index cb7d6561..2414393c 100644 --- a/modules/bedtools/makewindows/main.nf +++ b/modules/bedtools/makewindows/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_MAKEWINDOWS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def arg_input = use_bed ? "-b $regions" : "-g $regions" """ bedtools \\ diff --git a/modules/bedtools/maskfasta/main.nf b/modules/bedtools/maskfasta/main.nf index 77be060c..7eeb4c7d 100644 --- a/modules/bedtools/maskfasta/main.nf +++ b/modules/bedtools/maskfasta/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_MASKFASTA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ maskfasta \\ diff --git a/modules/bedtools/merge/main.nf b/modules/bedtools/merge/main.nf index 907f1c9b..5f1da95b 100644 --- a/modules/bedtools/merge/main.nf +++ b/modules/bedtools/merge/main.nf @@ -16,7 +16,7 @@ process BEDTOOLS_MERGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ merge \\ diff --git a/modules/bedtools/slop/main.nf b/modules/bedtools/slop/main.nf index e5d92850..9d8633ec 100644 --- a/modules/bedtools/slop/main.nf +++ b/modules/bedtools/slop/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_SLOP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ slop \\ diff --git a/modules/bedtools/sort/main.nf b/modules/bedtools/sort/main.nf index 15e69036..1ed95a57 100644 --- a/modules/bedtools/sort/main.nf +++ b/modules/bedtools/sort/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_SORT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ sort \\ diff --git a/modules/bedtools/subtract/main.nf b/modules/bedtools/subtract/main.nf index e645109d..b2efefe5 100644 --- a/modules/bedtools/subtract/main.nf +++ b/modules/bedtools/subtract/main.nf @@ -16,7 +16,7 @@ process BEDTOOLS_SUBTRACT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ subtract \\ diff --git a/modules/bismark/align/main.nf b/modules/bismark/align/main.nf index 95e7cdfc..e490b48c 100644 --- a/modules/bismark/align/main.nf +++ b/modules/bismark/align/main.nf @@ -19,7 +19,7 @@ process BISMARK_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def fastq = meta.single_end ? reads : "-1 ${reads[0]} -2 ${reads[1]}" """ bismark \\ diff --git a/modules/bismark/deduplicate/main.nf b/modules/bismark/deduplicate/main.nf index c95c54d1..16c624f1 100644 --- a/modules/bismark/deduplicate/main.nf +++ b/modules/bismark/deduplicate/main.nf @@ -17,7 +17,7 @@ process BISMARK_DEDUPLICATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def seqtype = meta.single_end ? '-s' : '-p' """ deduplicate_bismark \\ diff --git a/modules/blast/blastn/main.nf b/modules/blast/blastn/main.nf index d1bdcf77..3a0bafe0 100644 --- a/modules/blast/blastn/main.nf +++ b/modules/blast/blastn/main.nf @@ -17,7 +17,7 @@ process BLAST_BLASTN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.ndb" | sed 's/.ndb//'` blastn \\ diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 12188269..b25b5e21 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -20,7 +20,7 @@ process BOWTIE_ALIGN { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def unaligned = params.save_unaligned ? "--un ${prefix}.unmapped.fastq" : '' def endedness = meta.single_end ? "$reads" : "-1 ${reads[0]} -2 ${reads[1]}" """ diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 11c9c20a..41c8a6bf 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -20,7 +20,7 @@ process BOWTIE2_ALIGN { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { def unaligned = params.save_unaligned ? "--un-gz ${prefix}.unmapped.fastq.gz" : '' """ diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index f6cdaefa..992e25de 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -17,7 +17,7 @@ process BWA_ALN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index 9a04ed63..801293a8 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -18,7 +18,7 @@ process BWA_MEM { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index 2abd9335..0b5ec255 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -17,7 +17,7 @@ process BWA_SAMPE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-r ${meta.read_group}" : "" """ diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 56e9127f..bee06bc8 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -17,7 +17,7 @@ process BWA_SAMSE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-r ${meta.read_group}" : "" """ diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 7c238741..81b4b8ab 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -18,7 +18,7 @@ process BWAMEM2_MEM { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index 06e9da44..0bcd9bac 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -18,7 +18,7 @@ process BWAMETH_ALIGN { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ INDEX=`find -L ${index} -name "*.bwameth.c2t" | sed 's/.bwameth.c2t//'` diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index b6be93b0..c5ece83a 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -16,7 +16,7 @@ process CAT_FASTQ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def readList = reads.collect{ it.toString() } if (meta.single_end) { if (readList.size > 1) { diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index f6686cf2..4a7f0097 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -28,7 +28,7 @@ process CHROMAP_CHROMAP { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def args_list = args.tokenize() def file_extension = args.contains("--SAM") ? 
'sam' : args.contains("--TagAlign")? 'tagAlign' : args.contains("--pairs")? 'pairs' : 'bed' diff --git a/modules/clonalframeml/main.nf b/modules/clonalframeml/main.nf index 60eaad12..db647a38 100644 --- a/modules/clonalframeml/main.nf +++ b/modules/clonalframeml/main.nf @@ -21,7 +21,7 @@ process CLONALFRAMEML { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ClonalFrameML \\ $newick \\ diff --git a/modules/cmseq/polymut/main.nf b/modules/cmseq/polymut/main.nf index 18bb8c59..47e86f0c 100644 --- a/modules/cmseq/polymut/main.nf +++ b/modules/cmseq/polymut/main.nf @@ -18,7 +18,7 @@ process CMSEQ_POLYMUT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def fasta_refid = fasta ? "-c $fasta" : "" def sortindex = bai ? "" : "--sortindex" """ diff --git a/modules/cooler/cload/main.nf b/modules/cooler/cload/main.nf index ed7a41a1..d8bdc031 100644 --- a/modules/cooler/cload/main.nf +++ b/modules/cooler/cload/main.nf @@ -18,7 +18,7 @@ process COOLER_CLOAD { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def nproc = args.contains('pairix') || args.contains('tabix')? "--nproc $task.cpus" : '' """ diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index 0836640e..a438acc8 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -17,7 +17,7 @@ process COOLER_DUMP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def suffix = resolution ? "::$resolution" : "" """ cooler dump \\ diff --git a/modules/cooler/merge/main.nf b/modules/cooler/merge/main.nf index 0fed76c9..b1814b68 100644 --- a/modules/cooler/merge/main.nf +++ b/modules/cooler/merge/main.nf @@ -16,7 +16,7 @@ process COOLER_MERGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ cooler merge \\ $args \\ diff --git a/modules/cooler/zoomify/main.nf b/modules/cooler/zoomify/main.nf index e61ca99d..226d4114 100644 --- a/modules/cooler/zoomify/main.nf +++ b/modules/cooler/zoomify/main.nf @@ -16,7 +16,7 @@ process COOLER_ZOOMIFY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ cooler zoomify \\ $args \\ diff --git a/modules/csvtk/split/main.nf b/modules/csvtk/split/main.nf index 89b44154..52ab7ec7 100644 --- a/modules/csvtk/split/main.nf +++ b/modules/csvtk/split/main.nf @@ -18,7 +18,7 @@ process CSVTK_SPLIT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def delimiter = in_format == "tsv" ? "--tabs" : (in_format == "csv" ? "--delimiter ',' " : in_format) def out_delimiter = out_format == "tsv" ? "--out-tabs" : (out_format == "csv" ? "--out-delimiter ',' " : out_format) out_extension = out_format == "tsv" ? 
'tsv' : 'csv' diff --git a/modules/cutadapt/main.nf b/modules/cutadapt/main.nf index f98113e8..89105715 100644 --- a/modules/cutadapt/main.nf +++ b/modules/cutadapt/main.nf @@ -17,7 +17,7 @@ process CUTADAPT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def trimmed = meta.single_end ? "-o ${prefix}.trim.fastq.gz" : "-o ${prefix}_1.trim.fastq.gz -p ${prefix}_2.trim.fastq.gz" """ cutadapt \\ diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf index b67ee993..722f6c55 100644 --- a/modules/dastool/dastool/main.nf +++ b/modules/dastool/dastool/main.nf @@ -28,7 +28,7 @@ process DASTOOL_DASTOOL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def bin_list = bins instanceof List ? bins.join(",") : "$bins" def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond" def db_dir = db_directory ? "--db_directory $db_directory" : "" diff --git a/modules/dastool/scaffolds2bin/main.nf b/modules/dastool/scaffolds2bin/main.nf index 78a06b6e..09f800bb 100644 --- a/modules/dastool/scaffolds2bin/main.nf +++ b/modules/dastool/scaffolds2bin/main.nf @@ -17,7 +17,7 @@ process DASTOOL_SCAFFOLDS2BIN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def file_extension = extension ? extension : "fasta" """ diff --git a/modules/deeptools/computematrix/main.nf b/modules/deeptools/computematrix/main.nf index e39310f4..70be934b 100644 --- a/modules/deeptools/computematrix/main.nf +++ b/modules/deeptools/computematrix/main.nf @@ -18,7 +18,7 @@ process DEEPTOOLS_COMPUTEMATRIX { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ computeMatrix \\ $args \\ diff --git a/modules/deeptools/plotfingerprint/main.nf b/modules/deeptools/plotfingerprint/main.nf index aeb635ce..7925c9a9 100644 --- a/modules/deeptools/plotfingerprint/main.nf +++ b/modules/deeptools/plotfingerprint/main.nf @@ -18,7 +18,7 @@ process DEEPTOOLS_PLOTFINGERPRINT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def extend = (meta.single_end && params.fragment_size > 0) ? "--extendReads ${params.fragment_size}" : '' """ plotFingerprint \\ diff --git a/modules/deeptools/plotheatmap/main.nf b/modules/deeptools/plotheatmap/main.nf index f981744e..992c9058 100644 --- a/modules/deeptools/plotheatmap/main.nf +++ b/modules/deeptools/plotheatmap/main.nf @@ -17,7 +17,7 @@ process DEEPTOOLS_PLOTHEATMAP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ plotHeatmap \\ $args \\ diff --git a/modules/deeptools/plotprofile/main.nf b/modules/deeptools/plotprofile/main.nf index b32e04d3..60184fa6 100644 --- a/modules/deeptools/plotprofile/main.nf +++ b/modules/deeptools/plotprofile/main.nf @@ -17,7 +17,7 @@ process DEEPTOOLS_PLOTPROFILE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ plotProfile \\ $args \\ diff --git a/modules/delly/call/main.nf b/modules/delly/call/main.nf index d4aa1adb..fc04cda7 100644 --- a/modules/delly/call/main.nf +++ b/modules/delly/call/main.nf @@ -19,7 +19,7 @@ process DELLY_CALL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ delly \\ call \\ diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 015be864..c7342767 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -19,7 +19,7 @@ process DIAMOND_BLASTP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index f4018aa9..bd7d1dd9 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -19,7 +19,7 @@ process DIAMOND_BLASTX { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index 7cc5da22..d506a4b6 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -16,7 +16,7 @@ process DSHBIO_EXPORTSEGMENTS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ export-segments \\ diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 065d8bec..6480f4a4 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -16,7 +16,7 @@ process DSHBIO_FILTERBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ filter-bed \\ diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index c738c95a..a0bbf3af 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -16,7 +16,7 @@ process DSHBIO_FILTERGFF3 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ filter-gff3 \\ diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 60b8b7a3..8dbf1104 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -16,7 +16,7 @@ process DSHBIO_SPLITBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ split-bed \\ diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index 7ad2fd08..fc868a39 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -16,7 +16,7 @@ process DSHBIO_SPLITGFF3 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ split-gff3 \\ diff --git a/modules/ectyper/main.nf b/modules/ectyper/main.nf index 5f458eb9..0e040958 100644 --- a/modules/ectyper/main.nf +++ b/modules/ectyper/main.nf @@ -18,7 +18,7 @@ process ECTYPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def is_compressed = fasta.getName().endsWith(".gz") ? true : false def fasta_name = fasta.getName().replace(".gz", "") """ diff --git a/modules/emmtyper/main.nf b/modules/emmtyper/main.nf index 9cf98694..70dabfb7 100644 --- a/modules/emmtyper/main.nf +++ b/modules/emmtyper/main.nf @@ -16,7 +16,7 @@ process EMMTYPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ emmtyper \\ $args \\ diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 76cd9235..3182feb2 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -24,7 +24,7 @@ process ENSEMBLVEP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def dir_cache = task.ext.use_cache ? "\${PWD}/${cache}" : "/.vep" """ mkdir $prefix diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 2ef00d17..4db78230 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -18,7 +18,7 @@ process EXPANSIONHUNTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def gender = (meta.gender == 'male' || meta.gender == 1 || meta.gender == 'XY') ? "male" : "female" """ ExpansionHunter \\ diff --git a/modules/fastani/main.nf b/modules/fastani/main.nf index 7e3721bd..cc1c4902 100644 --- a/modules/fastani/main.nf +++ b/modules/fastani/main.nf @@ -17,7 +17,7 @@ process FASTANI { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.batch_input) { """ diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index 05eb1e98..33603842 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -24,7 +24,7 @@ process FASTP { script: def args = task.ext.args ?: '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { def fail_fastq = save_trimmed_fail ? "--failed_out ${prefix}.fail.fastq.gz" : '' """ diff --git a/modules/fastqc/main.nf b/modules/fastqc/main.nf index 673a00b8..d250eca0 100644 --- a/modules/fastqc/main.nf +++ b/modules/fastqc/main.nf @@ -18,7 +18,7 @@ process FASTQC { script: def args = task.ext.args ?: '' // Add soft-links to original FastQs for consistent naming in pipeline - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ [ ! 
-f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz diff --git a/modules/fastqscan/main.nf b/modules/fastqscan/main.nf index 768728f2..a0dcc46a 100644 --- a/modules/fastqscan/main.nf +++ b/modules/fastqscan/main.nf @@ -16,7 +16,7 @@ process FASTQSCAN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ zcat $reads | \\ fastq-scan \\ diff --git a/modules/fgbio/callmolecularconsensusreads/main.nf b/modules/fgbio/callmolecularconsensusreads/main.nf index f514b69a..3aab935b 100644 --- a/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/modules/fgbio/callmolecularconsensusreads/main.nf @@ -16,7 +16,7 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ fgbio \\ CallMolecularConsensusReads \\ diff --git a/modules/fgbio/fastqtobam/main.nf b/modules/fgbio/fastqtobam/main.nf index 40713d03..126c3dd8 100644 --- a/modules/fgbio/fastqtobam/main.nf +++ b/modules/fgbio/fastqtobam/main.nf @@ -17,7 +17,7 @@ process FGBIO_FASTQTOBAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir tmp diff --git a/modules/fgbio/groupreadsbyumi/main.nf b/modules/fgbio/groupreadsbyumi/main.nf index b35186a5..47f000a5 100644 --- a/modules/fgbio/groupreadsbyumi/main.nf +++ b/modules/fgbio/groupreadsbyumi/main.nf @@ -18,7 +18,7 @@ process FGBIO_GROUPREADSBYUMI { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir tmp diff --git a/modules/fgbio/sortbam/main.nf b/modules/fgbio/sortbam/main.nf index c2822548..c542f3df 100644 --- a/modules/fgbio/sortbam/main.nf +++ b/modules/fgbio/sortbam/main.nf @@ -16,7 +16,7 @@ process FGBIO_SORTBAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ fgbio \\ SortBam \\ diff --git a/modules/filtlong/main.nf b/modules/filtlong/main.nf index 10e147a6..bb1c1eb3 100644 --- a/modules/filtlong/main.nf +++ b/modules/filtlong/main.nf @@ -16,7 +16,7 @@ process FILTLONG { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def short_reads = meta.single_end ? "-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" """ filtlong \\ diff --git a/modules/flash/main.nf b/modules/flash/main.nf index 23bd1892..7bc38c97 100644 --- a/modules/flash/main.nf +++ b/modules/flash/main.nf @@ -15,7 +15,7 @@ process FLASH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ flash \\ $args \\ diff --git a/modules/freebayes/main.nf b/modules/freebayes/main.nf index b9a63d02..1dd91fef 100644 --- a/modules/freebayes/main.nf +++ b/modules/freebayes/main.nf @@ -22,7 +22,7 @@ process FREEBAYES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def input = input_2 ? "${input_1} ${input_2}" : "${input_1}" def targets_file = targets ? 
"--target ${targets}" : "" def samples_file = samples ? "--samples ${samples}" : "" diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index f93dd574..bd428d6c 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -20,7 +20,7 @@ process GATK4_APPLYBQSR { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" if (!task.memory) { log.info '[GATK ApplyBQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index b422a798..9b0bf286 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -22,7 +22,7 @@ process GATK4_BASERECALIBRATOR { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def intervalsCommand = intervalsBed ? "-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 77819a0f..c4538034 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -17,7 +17,7 @@ process GATK4_BEDTOINTERVALLIST { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk BedToIntervalList \\ -I $bed \\ diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index 93a2ee57..7c112c3c 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -18,7 +18,7 @@ process GATK4_CALCULATECONTAMINATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def matched_command = matched ? " -matched ${matched} " : '' def segment_command = segmentout ? " -segments ${prefix}.segmentation.table" : '' """ diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 9bc8d1d0..2860e82e 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -20,7 +20,7 @@ process GATK4_CREATESOMATICPANELOFNORMALS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk \\ CreateSomaticPanelOfNormals \\ diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index b0b35e42..f636dc46 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -19,7 +19,7 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def crams = cram.collect(){ x -> "-I ".concat(x.toString()) }.join(" ") def avail_mem = 3 diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index fc075735..915eb996 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -16,7 +16,7 @@ process GATK4_FASTQTOSAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_files = meta.single_end ? "-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" """ gatk FastqToSam \\ diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 7111db37..02fa804f 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -21,7 +21,7 @@ process GATK4_FILTERMUTECTCALLS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def orientationbias_options = '' if (orientationbias) { diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf index ddb4a922..f0b35447 100644 --- a/modules/gatk4/genotypegvcfs/main.nf +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -22,7 +22,7 @@ process GATK4_GENOTYPEGVCFS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 0894e17b..99be601f 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -19,7 +19,7 @@ process GATK4_GETPILEUPSUMMARIES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def sitesCommand = '' sitesCommand = sites ? " -L ${sites} " : " -L ${variants} " diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 418a2785..e00f1e58 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -23,7 +23,7 @@ process GATK4_HAPLOTYPECALLER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def interval_option = interval ? "-L ${interval}" : "" def dbsnp_option = dbsnp ? "-D ${dbsnp}" : "" def avail_mem = 3 diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 8e5b70e1..7e1a47f7 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -16,7 +16,7 @@ process GATK4_INTERVALLISTTOOLS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir ${prefix}_split diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index 5e9700e3..ac021afa 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -16,7 +16,7 @@ process GATK4_LEARNREADORIENTATIONMODEL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def inputs_list = [] f1r2.each() { a -> inputs_list.add(" -I " + a) } """ diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index 9f0b46da..a109facc 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -18,7 +18,7 @@ process GATK4_MARKDUPLICATES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") def avail_mem = 3 if (!task.memory) { diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 01effb0f..5e552cb2 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -19,7 +19,7 @@ process GATK4_MERGEBAMALIGNMENT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk MergeBamAlignment \\ ALIGNED=$aligned \\ diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index cbfc2e9d..cd1840c3 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -18,7 +18,7 @@ process GATK4_MERGEVCFS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" // Make list of VCFs to merge def input = "" diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 662b3f0c..2cf940de 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -30,7 +30,7 @@ process GATK4_MUTECT2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def panels_command = '' def normals_command = '' diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index bca31a29..638b7705 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -16,7 +16,7 @@ process GATK4_REVERTSAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk RevertSam \\ I=$bam \\ diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index aa9a6b2d..a909f540 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -16,7 +16,7 @@ process GATK4_SAMTOFASTQ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def output = meta.single_end ? 
"FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" """ gatk SamToFastq \\ diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 32d36df9..65b82a35 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -19,7 +19,7 @@ process GATK4_SPLITNCIGARREADS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk SplitNCigarReads \\ -R $fasta \\ diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index d5cc1eb3..00dc2588 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -20,7 +20,7 @@ process GATK4_VARIANTFILTRATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/genrich/main.nf b/modules/genrich/main.nf index dfbebd3a..d9deea3c 100644 --- a/modules/genrich/main.nf +++ b/modules/genrich/main.nf @@ -26,7 +26,7 @@ process GENRICH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def control = control_bam ? "-c $control_bam" : '' def blacklist = blacklist_bed ? "-E $blacklist_bed" : "" def pvalues = save_pvalues ? "-f ${prefix}.pvalues.bedGraph" : "" diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index e36729b2..b8afca22 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -16,7 +16,7 @@ process GLNEXUS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" // Make list of GVCFs to merge def input = gvcfs.collect { it.toString() } diff --git a/modules/graphmap2/align/main.nf b/modules/graphmap2/align/main.nf index e0f2d4cd..554e585b 100644 --- a/modules/graphmap2/align/main.nf +++ b/modules/graphmap2/align/main.nf @@ -19,7 +19,7 @@ process GRAPHMAP2_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ graphmap2 \\ align \\ diff --git a/modules/gstama/collapse/main.nf b/modules/gstama/collapse/main.nf index d8a64113..1c06692d 100644 --- a/modules/gstama/collapse/main.nf +++ b/modules/gstama/collapse/main.nf @@ -26,7 +26,7 @@ process GSTAMA_COLLAPSE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ tama_collapse.py \\ -s $bam \\ diff --git a/modules/gstama/merge/main.nf b/modules/gstama/merge/main.nf index 4a8e829c..53ff93e4 100644 --- a/modules/gstama/merge/main.nf +++ b/modules/gstama/merge/main.nf @@ -20,7 +20,7 @@ process GSTAMA_MERGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ tama_merge.py \\ -f $filelist \\ diff --git a/modules/gunc/run/main.nf b/modules/gunc/run/main.nf index 6ac681ad..8508c9f0 100644 --- a/modules/gunc/run/main.nf +++ b/modules/gunc/run/main.nf @@ -18,7 +18,7 @@ process GUNC_RUN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gunc \\ run \\ diff --git a/modules/hicap/main.nf b/modules/hicap/main.nf index ed1d7797..a96343f6 100644 --- a/modules/hicap/main.nf +++ b/modules/hicap/main.nf @@ -20,7 +20,7 @@ process HICAP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def database_args = database_dir ? "--database_dir ${database_dir}" : "" def model_args = model_fp ? "--model_fp ${model_fp}" : "" def is_compressed = fasta.getName().endsWith(".gz") ? true : false diff --git a/modules/hifiasm/main.nf b/modules/hifiasm/main.nf index 7fc857f1..208554d6 100644 --- a/modules/hifiasm/main.nf +++ b/modules/hifiasm/main.nf @@ -27,7 +27,7 @@ process HIFIASM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (use_parental_kmers) { """ hifiasm \\ diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 0c5f4134..ae888616 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -22,7 +22,7 @@ process HISAT2_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { diff --git a/modules/hmmcopy/readcounter/main.nf b/modules/hmmcopy/readcounter/main.nf index 6cd776a1..6399b1a2 100644 --- a/modules/hmmcopy/readcounter/main.nf +++ b/modules/hmmcopy/readcounter/main.nf @@ -18,7 +18,7 @@ process HMMCOPY_READCOUNTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ readCounter \\ $args \\ diff --git a/modules/hmmer/hmmalign/main.nf b/modules/hmmer/hmmalign/main.nf index a25871e8..e6d04044 100644 --- a/modules/hmmer/hmmalign/main.nf +++ b/modules/hmmer/hmmalign/main.nf @@ -17,7 +17,7 @@ process HMMER_HMMALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def fastacmd = fasta.getExtension() == 'gz' ? "gunzip -c $fasta" : "cat $fasta" """ $fastacmd | \\ diff --git a/modules/homer/annotatepeaks/main.nf b/modules/homer/annotatepeaks/main.nf index 321dbc7c..84e0241a 100644 --- a/modules/homer/annotatepeaks/main.nf +++ b/modules/homer/annotatepeaks/main.nf @@ -20,7 +20,7 @@ process HOMER_ANNOTATEPEAKS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ annotatePeaks.pl \\ $peak \\ diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf index a39fe753..66de06b6 100644 --- a/modules/homer/findpeaks/main.nf +++ b/modules/homer/findpeaks/main.nf @@ -18,7 +18,7 @@ process HOMER_FINDPEAKS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ findPeaks \\ diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index 44490d50..72e2091f 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -19,7 +19,7 @@ process HOMER_MAKETAGDIRECTORY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ makeTagDirectory \\ tag_dir \\ diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf index 8a0e3f37..17e86947 100644 --- a/modules/homer/makeucscfile/main.nf +++ b/modules/homer/makeucscfile/main.nf @@ -18,7 +18,7 @@ process HOMER_MAKEUCSCFILE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ makeUCSCfile \\ $tagDir \\ diff --git a/modules/imputeme/vcftoprs/main.nf b/modules/imputeme/vcftoprs/main.nf index 0c8c1952..5fee90c2 100644 --- a/modules/imputeme/vcftoprs/main.nf +++ b/modules/imputeme/vcftoprs/main.nf @@ -16,7 +16,7 @@ process IMPUTEME_VCFTOPRS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ #!/usr/bin/env Rscript diff --git a/modules/ismapper/main.nf b/modules/ismapper/main.nf index 4a33261b..a51cc01e 100644 --- a/modules/ismapper/main.nf +++ b/modules/ismapper/main.nf @@ -16,7 +16,7 @@ process ISMAPPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ismap \\ $args \\ diff --git a/modules/isoseq3/cluster/main.nf b/modules/isoseq3/cluster/main.nf index 27d5c3d8..fdd47971 100644 --- a/modules/isoseq3/cluster/main.nf +++ b/modules/isoseq3/cluster/main.nf @@ -26,7 +26,7 @@ process ISOSEQ3_CLUSTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ isoseq3 \\ cluster \\ diff --git a/modules/isoseq3/refine/main.nf b/modules/isoseq3/refine/main.nf index 5bde2f8f..5044cba2 100644 --- a/modules/isoseq3/refine/main.nf +++ b/modules/isoseq3/refine/main.nf @@ -21,7 +21,7 @@ process ISOSEQ3_REFINE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ isoseq3 \\ refine \\ diff --git a/modules/ivar/consensus/main.nf b/modules/ivar/consensus/main.nf index 4a657756..58d97c8c 100644 --- a/modules/ivar/consensus/main.nf +++ b/modules/ivar/consensus/main.nf @@ -20,7 +20,7 @@ process IVAR_CONSENSUS { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" """ samtools mpileup \\ diff --git a/modules/ivar/trim/main.nf b/modules/ivar/trim/main.nf index 35798123..4d0c70a2 100644 --- a/modules/ivar/trim/main.nf +++ b/modules/ivar/trim/main.nf @@ -18,7 +18,7 @@ process IVAR_TRIM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ivar trim \\ $args \\ diff --git a/modules/ivar/variants/main.nf b/modules/ivar/variants/main.nf index ba791307..ce4abd4d 100644 --- a/modules/ivar/variants/main.nf +++ b/modules/ivar/variants/main.nf @@ -20,7 +20,7 @@ process IVAR_VARIANTS { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" def features = params.gff ? "-g $gff" : "" """ diff --git a/modules/jupyternotebook/main.nf b/modules/jupyternotebook/main.nf index 02f1947f..e4bdf98b 100644 --- a/modules/jupyternotebook/main.nf +++ b/modules/jupyternotebook/main.nf @@ -24,7 +24,7 @@ process JUPYTERNOTEBOOK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index 00ca8971..d67eba31 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -22,7 +22,7 @@ process KALLISTOBUSTOOLS_COUNT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def cdna = t1c ? "-c1 $t1c" : '' def introns = t2c ? "-c2 $t2c" : '' """ diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf index b64a0c45..998eced1 100644 --- a/modules/kleborate/main.nf +++ b/modules/kleborate/main.nf @@ -16,7 +16,7 @@ process KLEBORATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ kleborate \\ $args \\ diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index e5fb4b80..3c4d1caf 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -19,7 +19,7 @@ process KRAKEN2_KRAKEN2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def paired = meta.single_end ? "" : "--paired" def classified = meta.single_end ? "${prefix}.classified.fastq" : "${prefix}.classified#.fastq" def unclassified = meta.single_end ? 
"${prefix}.unclassified.fastq" : "${prefix}.unclassified#.fastq" diff --git a/modules/last/dotplot/main.nf b/modules/last/dotplot/main.nf index 51667378..e8857403 100644 --- a/modules/last/dotplot/main.nf +++ b/modules/last/dotplot/main.nf @@ -18,7 +18,7 @@ process LAST_DOTPLOT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ last-dotplot \\ $args \\ diff --git a/modules/last/lastal/main.nf b/modules/last/lastal/main.nf index 4b90a965..b5ac8bfe 100644 --- a/modules/last/lastal/main.nf +++ b/modules/last/lastal/main.nf @@ -17,7 +17,7 @@ process LAST_LASTAL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def trained_params = param_file ? "-p ${param_file}" : '' """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) diff --git a/modules/last/lastdb/main.nf b/modules/last/lastdb/main.nf index ff6485dc..e9895c5c 100644 --- a/modules/last/lastdb/main.nf +++ b/modules/last/lastdb/main.nf @@ -16,7 +16,7 @@ process LAST_LASTDB { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir lastdb lastdb \\ diff --git a/modules/last/mafconvert/main.nf b/modules/last/mafconvert/main.nf index f1a7312e..ca60e7fe 100644 --- a/modules/last/mafconvert/main.nf +++ b/modules/last/mafconvert/main.nf @@ -25,7 +25,7 @@ process LAST_MAFCONVERT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ maf-convert $args $format $maf | gzip --no-name \\ > ${prefix}.${format}.gz diff --git a/modules/last/mafswap/main.nf b/modules/last/mafswap/main.nf index c66e47d4..0a58b027 100644 --- a/modules/last/mafswap/main.nf +++ b/modules/last/mafswap/main.nf @@ -16,7 +16,7 @@ process LAST_MAFSWAP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ maf-swap $args $maf | gzip --no-name > ${prefix}.swapped.maf.gz diff --git a/modules/last/postmask/main.nf b/modules/last/postmask/main.nf index e4f4390a..fb097a11 100644 --- a/modules/last/postmask/main.nf +++ b/modules/last/postmask/main.nf @@ -16,7 +16,7 @@ process LAST_POSTMASK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if( "$maf" == "${prefix}.maf.gz" ) error "Input and output names are the same, use the suffix option to disambiguate" """ last-postmask $args $maf | gzip --no-name > ${prefix}.maf.gz diff --git a/modules/last/split/main.nf b/modules/last/split/main.nf index ecc47e80..60ed135b 100644 --- a/modules/last/split/main.nf +++ b/modules/last/split/main.nf @@ -16,7 +16,7 @@ process LAST_SPLIT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ zcat < $maf | last-split $args | gzip --no-name > ${prefix}.maf.gz diff --git a/modules/last/train/main.nf b/modules/last/train/main.nf index 0a949857..471db7c1 100644 --- a/modules/last/train/main.nf +++ b/modules/last/train/main.nf @@ -17,7 +17,7 @@ process LAST_TRAIN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) diff --git a/modules/lima/main.nf b/modules/lima/main.nf index 64f6d87d..a662a7bb 100644 --- a/modules/lima/main.nf +++ b/modules/lima/main.nf @@ -30,7 +30,7 @@ process LIMA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ OUT_EXT="" diff --git a/modules/lissero/main.nf b/modules/lissero/main.nf index b5cd2b68..667697ef 100644 --- a/modules/lissero/main.nf +++ b/modules/lissero/main.nf @@ -16,7 +16,7 @@ process LISSERO { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lissero \\ $args \\ diff --git a/modules/lofreq/call/main.nf b/modules/lofreq/call/main.nf index 74995152..d7fd078b 100644 --- a/modules/lofreq/call/main.nf +++ b/modules/lofreq/call/main.nf @@ -17,7 +17,7 @@ process LOFREQ_CALL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ call \\ diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index 63ae2886..764efcc5 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -18,7 +18,7 @@ process LOFREQ_CALLPARALLEL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ call-parallel \\ diff --git a/modules/lofreq/filter/main.nf b/modules/lofreq/filter/main.nf index 6f13ae44..34a5aef8 100644 --- a/modules/lofreq/filter/main.nf +++ b/modules/lofreq/filter/main.nf @@ -16,7 +16,7 @@ process LOFREQ_FILTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ filter \\ diff --git a/modules/lofreq/indelqual/main.nf b/modules/lofreq/indelqual/main.nf index bf04c5d2..5e5b8f44 100644 --- a/modules/lofreq/indelqual/main.nf +++ b/modules/lofreq/indelqual/main.nf @@ -17,7 +17,7 @@ process LOFREQ_INDELQUAL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq indelqual \\ $args \\ diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index e8bfcda0..c5c88f8e 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -22,7 +22,7 @@ process MACS2_CALLPEAK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def args_list = args.tokenize() def format = meta.single_end ? 'BAM' : 'BAMPE' def control = controlbam ? 
"--control $controlbam" : '' diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index 553f0be9..2a8c0acc 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -25,7 +25,7 @@ process MANTA_GERMLINE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ diff --git a/modules/manta/somatic/main.nf b/modules/manta/somatic/main.nf index 38d73133..1d62635b 100644 --- a/modules/manta/somatic/main.nf +++ b/modules/manta/somatic/main.nf @@ -27,7 +27,7 @@ process MANTA_SOMATIC { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ diff --git a/modules/manta/tumoronly/main.nf b/modules/manta/tumoronly/main.nf index dc72fcc4..63f7a840 100644 --- a/modules/manta/tumoronly/main.nf +++ b/modules/manta/tumoronly/main.nf @@ -25,7 +25,7 @@ process MANTA_TUMORONLY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ diff --git a/modules/mapdamage2/main.nf b/modules/mapdamage2/main.nf index 3673970e..e3668fda 100644 --- a/modules/mapdamage2/main.nf +++ b/modules/mapdamage2/main.nf @@ -34,7 +34,7 @@ process MAPDAMAGE2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mapDamage \\ $args \\ diff --git a/modules/mash/sketch/main.nf b/modules/mash/sketch/main.nf index 0c0b6e17..d93641f7 100644 --- a/modules/mash/sketch/main.nf +++ b/modules/mash/sketch/main.nf @@ -16,7 +16,7 @@ process MASH_SKETCH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mash \\ sketch \\ diff --git a/modules/mashtree/main.nf b/modules/mashtree/main.nf index 6728e3ce..5da2f805 100644 --- a/modules/mashtree/main.nf +++ b/modules/mashtree/main.nf @@ -17,7 +17,7 @@ process MASHTREE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mashtree \\ $args \\ diff --git a/modules/maxbin2/main.nf b/modules/maxbin2/main.nf index e13af704..4d384391 100644 --- a/modules/maxbin2/main.nf +++ b/modules/maxbin2/main.nf @@ -23,7 +23,7 @@ process MAXBIN2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def associate_files = reads ? "-reads $reads" : "-abund $abund" """ run_MaxBin.pl \\ diff --git a/modules/medaka/main.nf b/modules/medaka/main.nf index e7a8b9cc..761b1c34 100644 --- a/modules/medaka/main.nf +++ b/modules/medaka/main.nf @@ -16,7 +16,7 @@ process MEDAKA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ medaka_consensus \\ -t $task.cpus \\ diff --git a/modules/megahit/main.nf b/modules/megahit/main.nf index 011fa7d3..7b511883 100644 --- a/modules/megahit/main.nf +++ b/modules/megahit/main.nf @@ -21,7 +21,7 @@ process MEGAHIT { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ megahit \\ diff --git a/modules/meningotype/main.nf b/modules/meningotype/main.nf index 5dde5633..c3b65b9d 100644 --- a/modules/meningotype/main.nf +++ b/modules/meningotype/main.nf @@ -16,7 +16,7 @@ process MENINGOTYPE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ meningotype \\ $args \\ diff --git a/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/modules/metabat2/jgisummarizebamcontigdepths/main.nf index e35d6715..4a5869b6 100644 --- a/modules/metabat2/jgisummarizebamcontigdepths/main.nf +++ b/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -16,7 +16,7 @@ process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ export OMP_NUM_THREADS=$task.cpus diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf index d158af91..2d01fdf6 100644 --- a/modules/metabat2/metabat2/main.nf +++ b/modules/metabat2/metabat2/main.nf @@ -17,7 +17,7 @@ process METABAT2_METABAT2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def decompress_depth = depth ? "gzip -d -f $depth" : "" def depth_file = depth ? "-a ${depth.baseName}" : "" """ diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index 9463da6f..64965af3 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -19,7 +19,7 @@ process METAPHLAN3 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def input_type = ("$input".endsWith(".fastq.gz")) ? "--input_type fastq" : ("$input".contains(".fasta")) ? "--input_type fasta" : ("$input".endsWith(".bowtie2out.txt")) ? "--input_type bowtie2out" : "--input_type sam" def input_data = ("$input_type".contains("fastq")) && !meta.single_end ? "${input[0]},${input[1]}" : "$input" def bowtie2_out = "$input_type" == "--input_type bowtie2out" || "$input_type" == "--input_type sam" ? '' : "--bowtie2out ${prefix}.bowtie2out.txt" diff --git a/modules/methyldackel/mbias/main.nf b/modules/methyldackel/mbias/main.nf index 1b4b14c4..021f76f1 100644 --- a/modules/methyldackel/mbias/main.nf +++ b/modules/methyldackel/mbias/main.nf @@ -18,7 +18,7 @@ process METHYLDACKEL_MBIAS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ MethylDackel mbias \\ $args \\ diff --git a/modules/minia/main.nf b/modules/minia/main.nf index 8516ef6e..ceff67c5 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -18,7 +18,7 @@ process MINIA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_list = reads.join(",") """ echo "${read_list}" | sed 's/,/\\n/g' > input_files.txt diff --git a/modules/miniasm/main.nf b/modules/miniasm/main.nf index 35c2e2c0..b0db6925 100644 --- a/modules/miniasm/main.nf +++ b/modules/miniasm/main.nf @@ -17,7 +17,7 @@ process MINIASM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ miniasm \\ $args \\ diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index c6c0c316..500250e9 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -17,7 +17,7 @@ process MINIMAP2_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def input_reads = meta.single_end ? "$reads" : "${reads[0]} ${reads[1]}" """ minimap2 \\ diff --git a/modules/mlst/main.nf b/modules/mlst/main.nf index aa338420..b2983b82 100644 --- a/modules/mlst/main.nf +++ b/modules/mlst/main.nf @@ -16,7 +16,7 @@ process MLST { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mlst \\ --threads $task.cpus \\ diff --git a/modules/mosdepth/main.nf b/modules/mosdepth/main.nf index b25e6a3d..d2669b7e 100644 --- a/modules/mosdepth/main.nf +++ b/modules/mosdepth/main.nf @@ -24,7 +24,7 @@ process MOSDEPTH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def interval = window_size ? "--by ${window_size}" : "--by ${bed}" """ mosdepth \\ diff --git a/modules/msisensor/scan/main.nf b/modules/msisensor/scan/main.nf index 2419a0a1..223b4f44 100644 --- a/modules/msisensor/scan/main.nf +++ b/modules/msisensor/scan/main.nf @@ -16,7 +16,7 @@ process MSISENSOR_SCAN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ msisensor \\ scan \\ diff --git a/modules/mtnucratio/main.nf b/modules/mtnucratio/main.nf index b8663469..83d6ea2b 100644 --- a/modules/mtnucratio/main.nf +++ b/modules/mtnucratio/main.nf @@ -18,7 +18,7 @@ process MTNUCRATIO { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mtnucratio \\ diff --git a/modules/mummer/main.nf b/modules/mummer/main.nf index f4f3bb18..39ad3e8b 100644 --- a/modules/mummer/main.nf +++ b/modules/mummer/main.nf @@ -18,7 +18,7 @@ process MUMMER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def is_compressed_ref = ref.getName().endsWith(".gz") ? 
true : false def fasta_name_ref = ref.getName().replace(".gz", "") diff --git a/modules/muscle/main.nf b/modules/muscle/main.nf index a50f5cb3..6d549aaa 100644 --- a/modules/muscle/main.nf +++ b/modules/muscle/main.nf @@ -23,7 +23,7 @@ process MUSCLE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def fasta_out = args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' def clw_out = args.contains('-clw') ? "-clwout ${prefix}_muscle_msa.clw" : '' def msf_out = args.contains('-msf') ? "-msfout ${prefix}_muscle_msa.msf" : '' diff --git a/modules/nanolyse/main.nf b/modules/nanolyse/main.nf index f29eeb77..0ad0f799 100644 --- a/modules/nanolyse/main.nf +++ b/modules/nanolyse/main.nf @@ -18,7 +18,7 @@ process NANOLYSE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gunzip -c $fastq | NanoLyse -r $fasta | gzip > ${prefix}.fastq.gz mv NanoLyse.log ${prefix}.nanolyse.log diff --git a/modules/ncbigenomedownload/main.nf b/modules/ncbigenomedownload/main.nf index 466c8d09..9897c861 100644 --- a/modules/ncbigenomedownload/main.nf +++ b/modules/ncbigenomedownload/main.nf @@ -29,7 +29,7 @@ process NCBIGENOMEDOWNLOAD { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def accessions_opt = accessions ? "-A ${accessions}" : "" """ ncbi-genome-download \\ diff --git a/modules/ngmaster/main.nf b/modules/ngmaster/main.nf index 0884b55c..7d04031c 100644 --- a/modules/ngmaster/main.nf +++ b/modules/ngmaster/main.nf @@ -16,7 +16,7 @@ process NGMASTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ngmaster \\ $args \\ diff --git a/modules/nucmer/main.nf b/modules/nucmer/main.nf index bb5dcb7d..4e296515 100644 --- a/modules/nucmer/main.nf +++ b/modules/nucmer/main.nf @@ -17,7 +17,7 @@ process NUCMER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false def is_compressed_query = query.getName().endsWith(".gz") ? true : false def fasta_name_ref = ref.getName().replace(".gz", "") diff --git a/modules/pairtools/dedup/main.nf b/modules/pairtools/dedup/main.nf index 5ee9dc43..fe59e155 100644 --- a/modules/pairtools/dedup/main.nf +++ b/modules/pairtools/dedup/main.nf @@ -17,7 +17,7 @@ process PAIRTOOLS_DEDUP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools dedup \\ $args \\ diff --git a/modules/pairtools/flip/main.nf b/modules/pairtools/flip/main.nf index 452800cc..376191ce 100644 --- a/modules/pairtools/flip/main.nf +++ b/modules/pairtools/flip/main.nf @@ -17,7 +17,7 @@ process PAIRTOOLS_FLIP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ flip \\ diff --git a/modules/pairtools/parse/main.nf b/modules/pairtools/parse/main.nf index 1d34d42c..7bd778c9 100644 --- a/modules/pairtools/parse/main.nf +++ b/modules/pairtools/parse/main.nf @@ -18,7 +18,7 @@ process PAIRTOOLS_PARSE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ parse \\ diff --git a/modules/pairtools/restrict/main.nf b/modules/pairtools/restrict/main.nf index 9fcc245c..8759f709 100644 --- a/modules/pairtools/restrict/main.nf +++ b/modules/pairtools/restrict/main.nf @@ -17,7 +17,7 @@ process PAIRTOOLS_RESTRICT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ restrict \\ diff --git a/modules/pairtools/select/main.nf b/modules/pairtools/select/main.nf index f699afa3..a6d62ba7 100644 --- a/modules/pairtools/select/main.nf +++ b/modules/pairtools/select/main.nf @@ -17,7 +17,7 @@ process PAIRTOOLS_SELECT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools select \\ "$args" \\ diff --git a/modules/pairtools/sort/main.nf b/modules/pairtools/sort/main.nf index 5caa5b74..d5996dd0 100644 --- a/modules/pairtools/sort/main.nf +++ b/modules/pairtools/sort/main.nf @@ -16,7 +16,7 @@ process PAIRTOOLS_SORT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def mem = task.memory.toString().replaceAll(/(\s|\.|B)+/, '') """ pairtools \\ diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index 99a68e09..5ee2b2e0 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -16,7 +16,7 @@ process PANGOLIN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pangolin \\ $fasta\\ diff --git a/modules/paraclu/main.nf b/modules/paraclu/main.nf index a2003834..1623ea89 100644 --- a/modules/paraclu/main.nf +++ b/modules/paraclu/main.nf @@ -19,7 +19,7 @@ process PARACLU { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ awk -F "\t" '{print\$1"\t"\$6"\t"\$2"\t"\$5}' < $bed > ${bed}_4P diff --git a/modules/pbbam/pbmerge/main.nf b/modules/pbbam/pbmerge/main.nf index 970128cb..e0525cb1 100644 --- a/modules/pbbam/pbmerge/main.nf +++ b/modules/pbbam/pbmerge/main.nf @@ -17,7 +17,7 @@ process PBBAM_PBMERGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pbmerge \\ -o ${prefix}.bam \\ diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 83e56d96..440fbc72 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -22,7 +22,7 @@ process PBCCS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ccs \\ $bam \\ diff --git a/modules/peddy/main.nf b/modules/peddy/main.nf index 0a6c3384..d64c3762 100644 --- a/modules/peddy/main.nf +++ b/modules/peddy/main.nf @@ -20,7 +20,7 @@ process PEDDY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ peddy \\ $args \\ diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index f2edabc3..6fe34cc2 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -20,7 +20,7 @@ process PHANTOMPEAKQUALTOOLS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ RUN_SPP=`which run_spp.R` Rscript -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus diff --git a/modules/phyloflash/main.nf b/modules/phyloflash/main.nf index c507dd14..9ebc40de 100644 --- a/modules/phyloflash/main.nf +++ b/modules/phyloflash/main.nf @@ -18,7 +18,7 @@ process PHYLOFLASH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ phyloFlash.pl \\ @@ -58,7 +58,7 @@ process PHYLOFLASH { } stub: - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir ${prefix} touch ${prefix}/${prefix}.SSU.collection.fasta diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index adb82d8c..3705b8fb 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -20,7 +20,7 @@ process PICARD_COLLECTHSMETRICS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "-R $fasta" : "" def avail_mem = 3 diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index f52f5885..6b292534 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -18,7 +18,7 @@ process PICARD_COLLECTMULTIPLEMETRICS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectMultipleMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index 94745d2d..eddb4604 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -17,7 +17,7 @@ process PICARD_COLLECTWGSMETRICS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectWgsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index 8b1d2e6b..d8de137b 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -17,7 +17,7 @@ process PICARD_FILTERSAMREADS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard FilterSamReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index d4c5886f..d3bf6938 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -18,7 +18,7 @@ process PICARD_MARKDUPLICATES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index 3a2fc620..86796593 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -16,7 +16,7 @@ process PICARD_MERGESAMFILES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def bam_files = bams.sort() def avail_mem = 3 if (!task.memory) { diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index b264b927..eb3caf40 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -17,7 +17,7 @@ process PICARD_SORTSAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard SortSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/pirate/main.nf b/modules/pirate/main.nf index 3bbb1d64..70de52e6 100644 --- a/modules/pirate/main.nf +++ b/modules/pirate/main.nf @@ -17,7 +17,7 @@ process PIRATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ PIRATE \\ $args \\ diff --git a/modules/plink/extract/main.nf b/modules/plink/extract/main.nf index 34b12fca..9b8a52f3 100644 --- a/modules/plink/extract/main.nf +++ b/modules/plink/extract/main.nf @@ -18,7 +18,7 @@ process PLINK_EXTRACT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if( "$bed" == "${prefix}.bed" ) error "Input and output names are the same, use the suffix option to disambiguate" """ plink \\ diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf index b6fd03d7..719e90d2 100644 --- a/modules/plink/vcf/main.nf +++ b/modules/plink/vcf/main.nf @@ -19,7 +19,7 @@ process PLINK_VCF { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ plink \\ diff --git a/modules/plink2/vcf/main.nf b/modules/plink2/vcf/main.nf index 8101f7dd..078ece1e 100644 --- a/modules/plink2/vcf/main.nf +++ b/modules/plink2/vcf/main.nf @@ -18,7 +18,7 @@ process PLINK2_VCF { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ plink2 \\ $args \\ diff --git a/modules/pmdtools/filter/main.nf b/modules/pmdtools/filter/main.nf index 301f9206..0b3bcbc6 100644 --- a/modules/pmdtools/filter/main.nf +++ b/modules/pmdtools/filter/main.nf @@ -21,7 +21,7 @@ process PMDTOOLS_FILTER { def args2 = task.ext.args2 ?: '' def args3 = task.ext.args3 ?: '' def split_cpus = Math.floor(task.cpus/2) - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if ("$bam" == "${prefix}.bam") error "[pmdtools/filter] Input and output names are the same, use the suffix option to disambiguate!" //threshold and header flags activate filtering function of pmdtools """ diff --git a/modules/porechop/main.nf b/modules/porechop/main.nf index 2edc5c78..249efad9 100644 --- a/modules/porechop/main.nf +++ b/modules/porechop/main.nf @@ -16,7 +16,7 @@ process PORECHOP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ porechop \\ -i $reads \\ diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index 43f86cf8..b5bd0620 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -18,7 +18,7 @@ process PRESEQ_LCEXTRAP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? '' : '-pe' """ preseq \\ diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index c55616db..c06c592c 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -16,7 +16,7 @@ process PYDAMAGE_ANALYZE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pydamage \\ analyze \\ diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index 2e0afac9..ab0b2115 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -16,7 +16,7 @@ process PYDAMAGE_FILTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pydamage \\ diff --git a/modules/qcat/main.nf b/modules/qcat/main.nf index 9f53f0cb..7d81952d 100644 --- a/modules/qcat/main.nf +++ b/modules/qcat/main.nf @@ -17,7 +17,7 @@ process QCAT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ## Unzip fastq file ## qcat doesn't support zipped files yet diff --git a/modules/racon/main.nf b/modules/racon/main.nf index 5936fac0..9be5ce63 100644 --- a/modules/racon/main.nf +++ b/modules/racon/main.nf @@ -16,7 +16,7 @@ process RACON { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ racon -t "$task.cpus" \\ "${reads}" \\ diff --git a/modules/rasusa/main.nf b/modules/rasusa/main.nf index b43792ee..c2893d18 100644 --- a/modules/rasusa/main.nf +++ b/modules/rasusa/main.nf @@ -17,7 +17,7 @@ process RASUSA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def output = meta.single_end ? "--output ${prefix}.fastq.gz" : "--output ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz" """ rasusa \\ diff --git a/modules/rmarkdownnotebook/main.nf b/modules/rmarkdownnotebook/main.nf index 9a7db505..f8183216 100644 --- a/modules/rmarkdownnotebook/main.nf +++ b/modules/rmarkdownnotebook/main.nf @@ -25,7 +25,7 @@ process RMARKDOWNNOTEBOOK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params diff --git a/modules/roary/main.nf b/modules/roary/main.nf index a05973eb..edda3281 100644 --- a/modules/roary/main.nf +++ b/modules/roary/main.nf @@ -17,7 +17,7 @@ process ROARY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ roary \\ $args \\ diff --git a/modules/rseqc/bamstat/main.nf b/modules/rseqc/bamstat/main.nf index d9d3fa36..1141a13f 100644 --- a/modules/rseqc/bamstat/main.nf +++ b/modules/rseqc/bamstat/main.nf @@ -16,7 +16,7 @@ process RSEQC_BAMSTAT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bam_stat.py \\ -i $bam \\ diff --git a/modules/rseqc/inferexperiment/main.nf b/modules/rseqc/inferexperiment/main.nf index 3b879cfb..2243c43e 100644 --- a/modules/rseqc/inferexperiment/main.nf +++ b/modules/rseqc/inferexperiment/main.nf @@ -17,7 +17,7 @@ process RSEQC_INFEREXPERIMENT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ infer_experiment.py \\ -i $bam \\ diff --git a/modules/rseqc/innerdistance/main.nf b/modules/rseqc/innerdistance/main.nf index 88bec499..425737d6 100644 --- a/modules/rseqc/innerdistance/main.nf +++ b/modules/rseqc/innerdistance/main.nf @@ -21,7 +21,7 @@ process RSEQC_INNERDISTANCE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (!meta.single_end) { """ inner_distance.py \\ diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index b6949641..d2562e5c 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -23,7 +23,7 @@ process RSEQC_JUNCTIONANNOTATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ junction_annotation.py \\ -i $bam \\ diff --git a/modules/rseqc/junctionsaturation/main.nf b/modules/rseqc/junctionsaturation/main.nf index 58451d2e..695762b5 100644 --- a/modules/rseqc/junctionsaturation/main.nf +++ b/modules/rseqc/junctionsaturation/main.nf @@ -18,7 +18,7 @@ process RSEQC_JUNCTIONSATURATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ junction_saturation.py \\ -i $bam \\ diff --git a/modules/rseqc/readdistribution/main.nf b/modules/rseqc/readdistribution/main.nf index 74af618d..333193e3 100644 --- a/modules/rseqc/readdistribution/main.nf +++ b/modules/rseqc/readdistribution/main.nf @@ -17,7 +17,7 @@ process RSEQC_READDISTRIBUTION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ read_distribution.py \\ -i $bam \\ diff --git a/modules/rseqc/readduplication/main.nf b/modules/rseqc/readduplication/main.nf index 80fcb150..134f2e8d 100644 --- a/modules/rseqc/readduplication/main.nf +++ b/modules/rseqc/readduplication/main.nf @@ -19,7 +19,7 @@ process RSEQC_READDUPLICATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ read_duplication.py \\ -i $bam \\ diff --git a/modules/samblaster/main.nf b/modules/samblaster/main.nf index 8445b0d0..c6573283 100644 --- a/modules/samblaster/main.nf +++ b/modules/samblaster/main.nf @@ -18,7 +18,7 @@ process SAMBLASTER { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def args3 = task.ext.args3 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if( "$bam" == "${prefix}.bam" ) error "Input and output names are the same, use the suffix option to disambiguate" """ samtools view -h $args2 $bam | \\ diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 87d6ff8b..55a2f736 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -21,7 +21,7 @@ process SAMTOOLS_AMPLICONCLIP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def rejects = save_cliprejects ? "--rejects-file ${prefix}.cliprejects.bam" : "" def stats = save_clipstats ? "-f ${prefix}.clipstats.txt" : "" """ diff --git a/modules/samtools/bam2fq/main.nf b/modules/samtools/bam2fq/main.nf index 20e83a14..689eb960 100644 --- a/modules/samtools/bam2fq/main.nf +++ b/modules/samtools/bam2fq/main.nf @@ -17,7 +17,7 @@ process SAMTOOLS_BAM2FQ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (split){ """ diff --git a/modules/samtools/depth/main.nf b/modules/samtools/depth/main.nf index f336547f..ebf029aa 100644 --- a/modules/samtools/depth/main.nf +++ b/modules/samtools/depth/main.nf @@ -16,7 +16,7 @@ process SAMTOOLS_DEPTH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ samtools \\ diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index bdbf53e4..212e804e 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -16,7 +16,7 @@ process SAMTOOLS_FASTQ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def endedness = meta.single_end ? "-0 ${prefix}.fastq.gz" : "-1 ${prefix}_1.fastq.gz -2 ${prefix}_2.fastq.gz" """ diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index 180833f4..8f86c1c4 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -16,7 +16,7 @@ process SAMTOOLS_FIXMATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if ("$bam" == "${prefix}.bam") error "Input and output names are the same, use the suffix option to disambiguate!" """ diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 5f6e2d49..c40f46d1 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -17,7 +17,7 @@ process SAMTOOLS_MPILEUP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ samtools mpileup \\ --fasta-ref $fasta \\ diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index 623f10b6..0c2cf25e 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -16,7 +16,7 @@ process SAMTOOLS_SORT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ samtools sort $args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index 464edd09..619b84dc 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -18,7 +18,7 @@ process SAMTOOLS_VIEW { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "--reference ${fasta} -C" : "" def file_type = input.getExtension() """ diff --git a/modules/scoary/main.nf b/modules/scoary/main.nf index 8fed0119..ca33041d 100644 --- a/modules/scoary/main.nf +++ b/modules/scoary/main.nf @@ -17,7 +17,7 @@ process SCOARY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def newick_tree = tree ? "-n ${tree}" : "" """ scoary \\ diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index 328e4e6c..12b9205f 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -19,7 +19,7 @@ process SEACR_CALLPEAK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def function_switch = ctrlbedgraph ? 
"$ctrlbedgraph" : "$threshold" """ SEACR_1.3.sh \\ diff --git a/modules/seqsero2/main.nf b/modules/seqsero2/main.nf index a8dd731e..0a7aa6ad 100644 --- a/modules/seqsero2/main.nf +++ b/modules/seqsero2/main.nf @@ -18,7 +18,7 @@ process SEQSERO2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ SeqSero2_package.py \\ $args \\ diff --git a/modules/seqtk/mergepe/main.nf b/modules/seqtk/mergepe/main.nf index 954bed5c..299c9ea4 100644 --- a/modules/seqtk/mergepe/main.nf +++ b/modules/seqtk/mergepe/main.nf @@ -16,7 +16,7 @@ process SEQTK_MERGEPE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ ln -s ${reads} ${prefix}.fastq.gz diff --git a/modules/seqtk/sample/main.nf b/modules/seqtk/sample/main.nf index 83a107d0..96e08fd4 100644 --- a/modules/seqtk/sample/main.nf +++ b/modules/seqtk/sample/main.nf @@ -17,7 +17,7 @@ process SEQTK_SAMPLE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ seqtk \\ diff --git a/modules/sequenzautils/bam2seqz/main.nf b/modules/sequenzautils/bam2seqz/main.nf index 9082d426..ce9d1962 100644 --- a/modules/sequenzautils/bam2seqz/main.nf +++ b/modules/sequenzautils/bam2seqz/main.nf @@ -18,7 +18,7 @@ process SEQUENZAUTILS_BAM2SEQZ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ sequenza-utils \\ bam2seqz \\ diff --git a/modules/sequenzautils/gcwiggle/main.nf b/modules/sequenzautils/gcwiggle/main.nf index 43358c43..a6fcb559 100644 --- a/modules/sequenzautils/gcwiggle/main.nf +++ b/modules/sequenzautils/gcwiggle/main.nf @@ -16,7 +16,7 @@ process SEQUENZAUTILS_GCWIGGLE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ sequenza-utils \\ gc_wiggle \\ diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index fb25a96e..089f3478 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -19,7 +19,7 @@ process SEQWISH_INDUCE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ seqwish \\ --threads $task.cpus \\ diff --git a/modules/snpdists/main.nf b/modules/snpdists/main.nf index de79e89b..c8d61161 100644 --- a/modules/snpdists/main.nf +++ b/modules/snpdists/main.nf @@ -16,7 +16,7 @@ process SNPDISTS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ snp-dists \\ $args \\ diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index 2cd023f6..d0ec993e 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -28,7 +28,7 @@ process SNPEFF { } else { avail_mem = task.memory.giga } - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def dir_cache = task.ext.use_cache ? 
"-dataDir \${PWD}/${cache}" : "" """ snpEff \\ diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index 83cd8092..5c0950d8 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -18,7 +18,7 @@ process SORTMERNA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ sortmerna \\ diff --git a/modules/spades/main.nf b/modules/spades/main.nf index 4663ec55..ba690d35 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -22,7 +22,7 @@ process SPADES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def maxmem = task.memory.toGiga() def illumina_reads = illumina ? ( meta.single_end ? "-s $illumina" : "-1 ${illumina[0]} -2 ${illumina[1]}" ) : "" def pacbio_reads = pacbio ? "--pacbio $pacbio" : "" diff --git a/modules/spatyper/main.nf b/modules/spatyper/main.nf index d7c75ba6..e0ba8d13 100644 --- a/modules/spatyper/main.nf +++ b/modules/spatyper/main.nf @@ -18,7 +18,7 @@ process SPATYPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def input_args = repeats && repeat_order ? "-r ${repeats} -o ${repeat_order}" : "" """ spaTyper \\ diff --git a/modules/staphopiasccmec/main.nf b/modules/staphopiasccmec/main.nf index f33634ae..dbb61a27 100644 --- a/modules/staphopiasccmec/main.nf +++ b/modules/staphopiasccmec/main.nf @@ -16,7 +16,7 @@ process STAPHOPIASCCMEC { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ staphopia-sccmec --assembly $fasta $args > ${prefix}.tsv diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index 46023d3e..9725496f 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -32,7 +32,7 @@ process STAR_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def ignore_gtf = star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" def seq_platform = seq_platform ? "'PL:$seq_platform'" : "" def seq_center = seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index e991db67..324be6df 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -23,7 +23,7 @@ process STRELKA_GERMLINE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def regions = target_bed ? "--exome --callRegions ${target_bed}" : "" """ configureStrelkaGermlineWorkflow.py \\ diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf index fa138633..a9766d01 100644 --- a/modules/strelka/somatic/main.nf +++ b/modules/strelka/somatic/main.nf @@ -23,7 +23,7 @@ process STRELKA_SOMATIC { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def options_target_bed = target_bed ? 
"--exome --callRegions ${target_bed}" : "" def options_manta = manta_candidate_small_indels ? "--indelCandidates ${manta_candidate_small_indels}" : "" """ diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index 4367a84d..9d62a966 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -20,7 +20,7 @@ process STRINGTIE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { diff --git a/modules/subread/featurecounts/main.nf b/modules/subread/featurecounts/main.nf index 43a7f8cd..53eb279e 100644 --- a/modules/subread/featurecounts/main.nf +++ b/modules/subread/featurecounts/main.nf @@ -17,7 +17,7 @@ process SUBREAD_FEATURECOUNTS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? '' : '-p' def strandedness = 0 diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index 13f9a942..ed9362b2 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -16,7 +16,7 @@ process TABIX_BGZIP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bgzip -c $args $input > ${prefix}.${input.getExtension()}.gz diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 9a633d2e..20b47a9f 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -17,7 +17,7 @@ process TABIX_BGZIPTABIX { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bgzip -c $args $input > ${prefix}.gz tabix $args2 ${prefix}.gz diff --git a/modules/tiddit/cov/main.nf b/modules/tiddit/cov/main.nf index e9bb9b5d..c5a1ca0f 100644 --- a/modules/tiddit/cov/main.nf +++ b/modules/tiddit/cov/main.nf @@ -18,7 +18,7 @@ process TIDDIT_COV { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "--ref $fasta" : "" """ tiddit \\ diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index 83a46f82..08eecc01 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -20,7 +20,7 @@ process TIDDIT_SV { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta == "dummy_file.txt" ? "--ref $fasta" : "" """ tiddit \\ diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 86761ad8..ee40b780 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -38,7 +38,7 @@ process TRIMGALORE { def tpc_r2 = params.three_prime_clip_r2 > 0 ? "--three_prime_clip_r2 ${params.three_prime_clip_r2}" : '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ [ ! 
-f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz diff --git a/modules/ucsc/bed12tobigbed/main.nf b/modules/ucsc/bed12tobigbed/main.nf index 937eabd6..742798b3 100644 --- a/modules/ucsc/bed12tobigbed/main.nf +++ b/modules/ucsc/bed12tobigbed/main.nf @@ -19,7 +19,7 @@ process UCSC_BED12TOBIGBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedToBigBed \\ $bed \\ diff --git a/modules/ucsc/bedclip/main.nf b/modules/ucsc/bedclip/main.nf index 1d46342c..dacd7260 100755 --- a/modules/ucsc/bedclip/main.nf +++ b/modules/ucsc/bedclip/main.nf @@ -19,7 +19,7 @@ process UCSC_BEDCLIP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedClip \\ $bedgraph \\ diff --git a/modules/ucsc/bedgraphtobigwig/main.nf b/modules/ucsc/bedgraphtobigwig/main.nf index e18b41bc..9ba306ab 100644 --- a/modules/ucsc/bedgraphtobigwig/main.nf +++ b/modules/ucsc/bedgraphtobigwig/main.nf @@ -19,7 +19,7 @@ process UCSC_BEDGRAPHTOBIGWIG { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedGraphToBigWig \\ $bedgraph \\ diff --git a/modules/ucsc/bigwigaverageoverbed/main.nf b/modules/ucsc/bigwigaverageoverbed/main.nf index 8c6f1178..1e97c83d 100644 --- a/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/modules/ucsc/bigwigaverageoverbed/main.nf @@ -19,7 +19,7 @@ process UCSC_BIGWIGAVERAGEOVERBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" // BUG: bigWigAverageOverBed cannot handle ensembl seqlevels style """ bigWigAverageOverBed \\ diff --git a/modules/ucsc/liftover/main.nf b/modules/ucsc/liftover/main.nf index 1c667262..3dd9531e 100644 --- a/modules/ucsc/liftover/main.nf +++ b/modules/ucsc/liftover/main.nf @@ -20,7 +20,7 @@ process UCSC_LIFTOVER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ liftOver \\ diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf index 5df34121..f2dcb543 100644 --- a/modules/ultra/pipeline/main.nf +++ b/modules/ultra/pipeline/main.nf @@ -18,7 +18,7 @@ process ULTRA_PIPELINE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ uLTRA \\ pipeline \\ diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index 287bb8c2..ce21437d 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -16,7 +16,7 @@ process UMITOOLS_DEDUP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def paired = meta.single_end ? "" : "--paired" """ umi_tools dedup \\ diff --git a/modules/umitools/extract/main.nf b/modules/umitools/extract/main.nf index 3c2402e2..fba8f054 100644 --- a/modules/umitools/extract/main.nf +++ b/modules/umitools/extract/main.nf @@ -17,7 +17,7 @@ process UMITOOLS_EXTRACT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ umi_tools \\ diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 14319dc1..1ccc72a9 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -18,7 +18,7 @@ process UNICYCLER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def short_reads = shortreads ? ( meta.single_end ? "-s $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" ) : "" def long_reads = longreads ? "-l $longreads" : "" """ diff --git a/modules/variantbam/main.nf b/modules/variantbam/main.nf index 3d354016..11059a9a 100644 --- a/modules/variantbam/main.nf +++ b/modules/variantbam/main.nf @@ -18,7 +18,7 @@ process VARIANTBAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ variant \\ $bam \\ diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index 62fff0cf..fbe646ca 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -83,7 +83,7 @@ process VCFTOOLS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def args_list = args.tokenize() def bed_arg = (args.contains('--bed')) ? "--bed ${bed}" : diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 6e7f433b..4539033d 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -17,7 +17,7 @@ process YARA_MAPPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ yara_mapper \\ diff --git a/tests/modules/bbmap/bbduk/nextflow.config b/tests/modules/bbmap/bbduk/nextflow.config index 8940a9be..46fc33b4 100644 --- a/tests/modules/bbmap/bbduk/nextflow.config +++ b/tests/modules/bbmap/bbduk/nextflow.config @@ -4,7 +4,7 @@ process { withName: BBMAP_BBDUK { ext.args = 'trimq=10 qtrim=r' - ext.suffix = '.trim' + ext.prefix = { "${meta.id}.trim" } } } diff --git a/tests/modules/bcftools/reheader/nextflow.config b/tests/modules/bcftools/reheader/nextflow.config index a377b26d..55d2cff8 100644 --- a/tests/modules/bcftools/reheader/nextflow.config +++ b/tests/modules/bcftools/reheader/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BCFTOOLS_REHEADER { - ext.suffix = '.updated' + ext.prefix = { "${meta.id}.updated" } } } diff --git a/tests/modules/bedtools/complement/nextflow.config b/tests/modules/bedtools/complement/nextflow.config index 561fdead..cb867120 100644 --- a/tests/modules/bedtools/complement/nextflow.config +++ b/tests/modules/bedtools/complement/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_COMPLEMENT { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/genomecov/nextflow.config b/tests/modules/bedtools/genomecov/nextflow.config index bc0e4aaf..6e1c03e2 100644 --- a/tests/modules/bedtools/genomecov/nextflow.config +++ b/tests/modules/bedtools/genomecov/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_GENOMECOV { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/intersect/nextflow.config b/tests/modules/bedtools/intersect/nextflow.config index c7d0c826..3aa2593f 100644 --- a/tests/modules/bedtools/intersect/nextflow.config +++ b/tests/modules/bedtools/intersect/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_INTERSECT { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/merge/nextflow.config b/tests/modules/bedtools/merge/nextflow.config index e7d635dd..545a523d 100644 --- a/tests/modules/bedtools/merge/nextflow.config +++ b/tests/modules/bedtools/merge/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_MERGE { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/slop/nextflow.config b/tests/modules/bedtools/slop/nextflow.config index 5dc03727..09abb51a 100644 --- a/tests/modules/bedtools/slop/nextflow.config +++ b/tests/modules/bedtools/slop/nextflow.config @@ -4,7 +4,7 @@ process { withName: BEDTOOLS_SLOP { ext.args = '-l 15 -r 30' - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/sort/nextflow.config b/tests/modules/bedtools/sort/nextflow.config index 6bb73232..2ecc295a 100644 --- a/tests/modules/bedtools/sort/nextflow.config +++ b/tests/modules/bedtools/sort/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_SORT { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/diamond/blastp/nextflow.config b/tests/modules/diamond/blastp/nextflow.config index d1222d49..5a9aacad 100644 --- a/tests/modules/diamond/blastp/nextflow.config +++ b/tests/modules/diamond/blastp/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: DIAMOND_BLASTP { - ext.suffix = '.diamond_blastp' + ext.prefix = { "${meta.id}.diamond_blastp" } } } diff --git a/tests/modules/diamond/blastx/nextflow.config b/tests/modules/diamond/blastx/nextflow.config index 83169455..25320af3 100644 --- a/tests/modules/diamond/blastx/nextflow.config +++ b/tests/modules/diamond/blastx/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: DIAMOND_BLASTX { - ext.suffix = '.diamond_blastx' + ext.prefix = { "${meta.id}.diamond_blastx" } } } diff --git a/tests/modules/dshbio/filterbed/nextflow.config b/tests/modules/dshbio/filterbed/nextflow.config index 2f1e5ab9..3937a184 100644 --- a/tests/modules/dshbio/filterbed/nextflow.config +++ b/tests/modules/dshbio/filterbed/nextflow.config @@ -4,6 +4,6 @@ process { withName: DSHBIO_FILTERBED { ext.args = '--range chr1:0-1000' - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/dshbio/filtergff3/nextflow.config b/tests/modules/dshbio/filtergff3/nextflow.config index c4b75eaf..80dcd28c 100644 --- a/tests/modules/dshbio/filtergff3/nextflow.config +++ b/tests/modules/dshbio/filtergff3/nextflow.config @@ -4,7 +4,7 @@ process { withName: DSHBIO_FILTERGFF3 { ext.args = '--range MT192765.1:0-1000' - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/dshbio/splitbed/nextflow.config b/tests/modules/dshbio/splitbed/nextflow.config index 4369c509..ad9c045b 100644 --- a/tests/modules/dshbio/splitbed/nextflow.config +++ b/tests/modules/dshbio/splitbed/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: DSHBIO_SPLITBED { - ext.suffix = '.' + ext.prefix = { "${meta.id}." } ext.args = '--records 2' } diff --git a/tests/modules/dshbio/splitgff3/nextflow.config b/tests/modules/dshbio/splitgff3/nextflow.config index e31f8e13..f6a0b921 100644 --- a/tests/modules/dshbio/splitgff3/nextflow.config +++ b/tests/modules/dshbio/splitgff3/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: DSHBIO_SPLITGFF3 { - ext.suffix = '.' + ext.prefix = { "${meta.id}." 
} ext.args = '--records 15' } diff --git a/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config index 0a266da9..e6721ff6 100644 --- a/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config +++ b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config @@ -4,12 +4,12 @@ process { withName: FGBIO_SORTBAM { ext.args = '-s TemplateCoordinate' - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } withName: FGBIO_CALLMOLECULARCONSENSUSREADS { ext.args = '-M 1' - ext.suffix = '_molreads' + ext.prefix = { "${meta.id}_molreads" } } } diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config index 6fda39ec..d73e78ad 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config +++ b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GATK4_CREATESOMATICPANELOFNORMALS { - ext.suffix = '.pon' + ext.prefix = { "${meta.id}.pon" } } } diff --git a/tests/modules/gatk4/filtermutectcalls/nextflow.config b/tests/modules/gatk4/filtermutectcalls/nextflow.config index c830fdc6..3d4148d2 100644 --- a/tests/modules/gatk4/filtermutectcalls/nextflow.config +++ b/tests/modules/gatk4/filtermutectcalls/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GATK4_FILTERMUTECTCALLS { - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/gatk4/genotypegvcfs/nextflow.config b/tests/modules/gatk4/genotypegvcfs/nextflow.config index aaa704da..97396a74 100644 --- a/tests/modules/gatk4/genotypegvcfs/nextflow.config +++ b/tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GATK4_GENOTYPEGVCFS { - ext.suffix = '.genotyped' + ext.prefix = { "${meta.id}.genotyped" } } } diff --git a/tests/modules/gatk4/learnreadorientationmodel/nextflow.config b/tests/modules/gatk4/learnreadorientationmodel/nextflow.config index 3a74623a..463e2d54 100644 --- a/tests/modules/gatk4/learnreadorientationmodel/nextflow.config +++ b/tests/modules/gatk4/learnreadorientationmodel/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GATK4_LEARNREADORIENTATIONMODEL { - ext.suffix = '.artifact-prior' + ext.prefix = { "${meta.id}.artifact-prior" } } } diff --git a/tests/modules/gatk4/variantfiltration/nextflow.config b/tests/modules/gatk4/variantfiltration/nextflow.config index ff2feb9c..4b930f28 100644 --- a/tests/modules/gatk4/variantfiltration/nextflow.config +++ b/tests/modules/gatk4/variantfiltration/nextflow.config @@ -4,7 +4,7 @@ process { withName: GATK4_VARIANTFILTRATION { ext.args = "--filter-name \'test_filter\' --filter-expression \'MQ0 > 0\'" - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/gffread/nextflow.config b/tests/modules/gffread/nextflow.config index 00c052f5..c020f934 100644 --- a/tests/modules/gffread/nextflow.config +++ b/tests/modules/gffread/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GFFREAD { - ext.suffix = '.out' + ext.prefix = { "${meta.id}.out" } } } diff --git a/tests/modules/gstama/collapse/nextflow.config b/tests/modules/gstama/collapse/nextflow.config index 0455c8b2..a68f33f2 100644 --- a/tests/modules/gstama/collapse/nextflow.config +++ b/tests/modules/gstama/collapse/nextflow.config @@ -4,7 +4,7 @@ process { withName: GSTAMA_COLLAPSE { ext.args = '-x capped -b BAM' - ext.suffix = '_tc' + ext.prefix = { "${meta.id}_tc" } } } diff --git a/tests/modules/gstama/merge/nextflow.config b/tests/modules/gstama/merge/nextflow.config index a9c63fcf..e0d7c8ef 100644 --- a/tests/modules/gstama/merge/nextflow.config +++ b/tests/modules/gstama/merge/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GSTAMA_MERGE { - ext.suffix = '_merged' + ext.prefix = { "${meta.id}_merged" } } } diff --git a/tests/modules/isoseq3/refine/nextflow.config b/tests/modules/isoseq3/refine/nextflow.config index 88f1bdc4..6a4dea9f 100644 --- a/tests/modules/isoseq3/refine/nextflow.config +++ b/tests/modules/isoseq3/refine/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: ISOSEQ3_REFINE { - ext.suffix = '.refine' + ext.prefix = { "${meta.id}.refine" } } } diff --git a/tests/modules/last/postmask/nextflow.config b/tests/modules/last/postmask/nextflow.config index dc021264..70c3f35b 100644 --- a/tests/modules/last/postmask/nextflow.config +++ b/tests/modules/last/postmask/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: LAST_POSTMASK { - ext.suffix = '.postmask' + ext.prefix = { "${meta.id}.postmask" } } } diff --git a/tests/modules/last/split/nextflow.config b/tests/modules/last/split/nextflow.config index 8b31ca0f..6252ec14 100644 --- a/tests/modules/last/split/nextflow.config +++ b/tests/modules/last/split/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: LAST_SPLIT { - ext.suffix = '.split' + ext.prefix = { "${meta.id}.split" } } } diff --git a/tests/modules/lima/nextflow.config b/tests/modules/lima/nextflow.config index 5091b034..8da2613f 100644 --- a/tests/modules/lima/nextflow.config +++ b/tests/modules/lima/nextflow.config @@ -4,7 +4,7 @@ process { withName: LIMA { ext.args = '--isoseq --peek-guess' - ext.suffix = '.fl' + ext.prefix = { "${meta.id}.fl" } } } diff --git a/tests/modules/lofreq/indelqual/nextflow.config b/tests/modules/lofreq/indelqual/nextflow.config index b9ad2787..c50c1363 100644 --- a/tests/modules/lofreq/indelqual/nextflow.config +++ b/tests/modules/lofreq/indelqual/nextflow.config @@ -4,7 +4,7 @@ process { withName: LOFREQ_INDELQUAL { ext.args = '--dindel' - ext.suffix = '.indelqual' + ext.prefix = { "${meta.id}.indelqual" } } } diff --git a/tests/modules/medaka/nextflow.config b/tests/modules/medaka/nextflow.config index 1f89be62..c0b1b507 100644 --- a/tests/modules/medaka/nextflow.config +++ b/tests/modules/medaka/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: MEDAKA { - ext.suffix = '.polished.genome' + ext.prefix = { "${meta.id}.polished.genome" } } } diff --git 
a/tests/modules/metaphlan3/nextflow.config b/tests/modules/metaphlan3/nextflow.config index 2dde2212..a47b46e0 100644 --- a/tests/modules/metaphlan3/nextflow.config +++ b/tests/modules/metaphlan3/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SAMTOOLS_VIEW { - ext.suffix = '.sam' + ext.prefix = { "${meta.id}.sam" } } withName: METAPHLAN3 { diff --git a/tests/modules/miniasm/nextflow.config b/tests/modules/miniasm/nextflow.config index 844a0120..23f0a8d0 100644 --- a/tests/modules/miniasm/nextflow.config +++ b/tests/modules/miniasm/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: MINIASM { - ext.suffix = '.assembly' + ext.prefix = { "${meta.id}.assembly" } } } diff --git a/tests/modules/nanolyse/nextflow.config b/tests/modules/nanolyse/nextflow.config index ede080cc..5f7b5bed 100644 --- a/tests/modules/nanolyse/nextflow.config +++ b/tests/modules/nanolyse/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: NANOLYSE { - ext.suffix = '.clean' + ext.prefix = { "${meta.id}.clean" } } } diff --git a/tests/modules/pairtools/dedup/nextflow.config b/tests/modules/pairtools/dedup/nextflow.config index 1de3348f..b47fab16 100644 --- a/tests/modules/pairtools/dedup/nextflow.config +++ b/tests/modules/pairtools/dedup/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PAIRTOOLS_DEDUP { - ext.suffix = '.dedup' + ext.prefix = { "${meta.id}.dedup" } } } diff --git a/tests/modules/pairtools/parse/nextflow.config b/tests/modules/pairtools/parse/nextflow.config index 1a1182f6..a5d3ef9d 100644 --- a/tests/modules/pairtools/parse/nextflow.config +++ b/tests/modules/pairtools/parse/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PAIRTOOLS_PARSE { - ext.suffix = '.raw' + ext.prefix = { "${meta.id}.raw" } } } diff --git a/tests/modules/pairtools/restrict/nextflow.config b/tests/modules/pairtools/restrict/nextflow.config index 857d7534..fa8217bc 100644 --- a/tests/modules/pairtools/restrict/nextflow.config +++ b/tests/modules/pairtools/restrict/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PAIRTOOLS_RESTRICT { - ext.suffix = '.restrict' + ext.prefix = { "${meta.id}.restrict" } } } diff --git a/tests/modules/pairtools/sort/nextflow.config b/tests/modules/pairtools/sort/nextflow.config index 86b3d802..dfaf6053 100644 --- a/tests/modules/pairtools/sort/nextflow.config +++ b/tests/modules/pairtools/sort/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PAIRTOOLS_SORT { - ext.suffix = '.sorted' + ext.prefix = { "${meta.id}.sorted" } } } diff --git a/tests/modules/pbbam/pbmerge/nextflow.config b/tests/modules/pbbam/pbmerge/nextflow.config index c897068b..4fc270a9 100644 --- a/tests/modules/pbbam/pbmerge/nextflow.config +++ b/tests/modules/pbbam/pbmerge/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: 
PBBAM_PBMERGE { - ext.suffix = '.merged' + ext.prefix = { "${meta.id}.merged" } } } diff --git a/tests/modules/picard/filtersamreads/nextflow.config b/tests/modules/picard/filtersamreads/nextflow.config index e9ce4914..653e9633 100644 --- a/tests/modules/picard/filtersamreads/nextflow.config +++ b/tests/modules/picard/filtersamreads/nextflow.config @@ -3,11 +3,11 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PICARD_SORTSAM { - ext.suffix = '.sorted' + ext.prefix = { "${meta.id}.sorted" } } withName: PICARD_FILTERSAMREADS { - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/picard/sortsam/nextflow.config b/tests/modules/picard/sortsam/nextflow.config index 2c290cbe..ca572c2f 100644 --- a/tests/modules/picard/sortsam/nextflow.config +++ b/tests/modules/picard/sortsam/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PICARD_SORTSAM { - ext.suffix = '.sorted' + ext.prefix = { "${meta.id}.sorted" } } } diff --git a/tests/modules/plink/extract/nextflow.config b/tests/modules/plink/extract/nextflow.config index 12668b01..6a7f6d42 100644 --- a/tests/modules/plink/extract/nextflow.config +++ b/tests/modules/plink/extract/nextflow.config @@ -7,7 +7,7 @@ process { } withName: PLINK_EXTRACT { - ext.suffix = '.extract' + ext.prefix = { "${meta.id}.extract" } } } diff --git a/tests/modules/porechop/nextflow.config b/tests/modules/porechop/nextflow.config index 3a0536b0..85eb257a 100644 --- a/tests/modules/porechop/nextflow.config +++ b/tests/modules/porechop/nextflow.config @@ -4,7 +4,7 @@ process { withName: PORECHOP { ext.args = '' - ext.suffix = '_porechop' + ext.prefix = { "${meta.id}_porechop" } } } diff --git a/tests/modules/rasusa/nextflow.config b/tests/modules/rasusa/nextflow.config index fea844ae..50c32e5c 100644 --- a/tests/modules/rasusa/nextflow.config +++ b/tests/modules/rasusa/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: RASUSA { - ext.suffix = '_100X' + ext.prefix = { "${meta.id}_100X" } } } diff --git a/tests/modules/samblaster/nextflow.config b/tests/modules/samblaster/nextflow.config index 3018088b..7ba8b23b 100644 --- a/tests/modules/samblaster/nextflow.config +++ b/tests/modules/samblaster/nextflow.config @@ -4,7 +4,7 @@ process { withName: SAMBLASTER { ext.args = '-M --addMateTags' - ext.suffix = '.processed' + ext.prefix = { "${meta.id}.processed" } } } diff --git a/tests/modules/samtools/merge/nextflow.config b/tests/modules/samtools/merge/nextflow.config index cb350bf7..4ac70fa0 100644 --- a/tests/modules/samtools/merge/nextflow.config +++ b/tests/modules/samtools/merge/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SAMTOOLS_MERGE { - ext.suffix = '_merged' + ext.prefix = { "${meta.id}_merged" } } } diff --git a/tests/modules/samtools/sort/nextflow.config b/tests/modules/samtools/sort/nextflow.config index 57ae6280..230bec5f 100644 --- a/tests/modules/samtools/sort/nextflow.config +++ b/tests/modules/samtools/sort/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SAMTOOLS_SORT { - ext.suffix = '.sorted' + ext.prefix = { "${meta.id}.sorted" } } 
} diff --git a/tests/modules/seqtk/mergepe/nextflow.config b/tests/modules/seqtk/mergepe/nextflow.config index b14e72ff..04eeef72 100644 --- a/tests/modules/seqtk/mergepe/nextflow.config +++ b/tests/modules/seqtk/mergepe/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SEQTK_MERGEPE { - ext.suffix = '.processed' + ext.prefix = { "${meta.id}.processed" } } } diff --git a/tests/modules/seqtk/sample/nextflow.config b/tests/modules/seqtk/sample/nextflow.config index 3efac50d..a79ad290 100644 --- a/tests/modules/seqtk/sample/nextflow.config +++ b/tests/modules/seqtk/sample/nextflow.config @@ -4,7 +4,7 @@ process { withName: SEQTK_SAMPLE { ext.args = '-s100' - ext.suffix = '.sampled' + ext.prefix = { "${meta.id}.sampled" } } } diff --git a/tests/modules/seqtk/subseq/nextflow.config b/tests/modules/seqtk/subseq/nextflow.config index c61c4a74..24f16bad 100644 --- a/tests/modules/seqtk/subseq/nextflow.config +++ b/tests/modules/seqtk/subseq/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SEQTK_SUBSEQ { - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/ucsc/bedclip/nextflow.config b/tests/modules/ucsc/bedclip/nextflow.config index 4adc3b8f..46af4b0a 100644 --- a/tests/modules/ucsc/bedclip/nextflow.config +++ b/tests/modules/ucsc/bedclip/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: UCSC_BEDCLIP { - ext.suffix = '.clip' + ext.prefix = { "${meta.id}.clip" } } } diff --git a/tests/modules/ultra/pipeline/nextflow.config b/tests/modules/ultra/pipeline/nextflow.config index a3b88ea3..16ed7f9b 100644 --- a/tests/modules/ultra/pipeline/nextflow.config +++ b/tests/modules/ultra/pipeline/nextflow.config @@ -4,7 +4,7 @@ process { withName: GFFREAD { ext.args = '--sort-alpha --keep-genes -T' - ext.suffix = '_sorted' + ext.prefix = { "${meta.id}_sorted" } } } From e2ba70ed9a1d2f09aa77b0744ea50b447c35f696 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 2 Dec 2021 08:27:20 -0600 Subject: [PATCH 280/314] Add Cell Ranger mkfastq, mkgtf, and count (#979) * feat(cellranger): Add initial count module Co-authored-by: Gisela Gabernet * feat(cellranger): Add mkgtf module * test(cellranger): Fix count test with mkgtf * fix(cellranger): Generalize gtf attribute filters * chore: Add .gitignore for cellranger tar * build(cellranger): Update dockerfile https://joshtronic.com/2021/09/12/fixed-repository-debian-security-buster-updates-changed-suite-from-stable-to-oldstable/ * Apply suggestions from code review Co-authored-by: Gisela Gabernet * Apply suggestions from code review Co-authored-by: Harshil Patel * Update modules/cellranger/mkgtf/main.nf Co-authored-by: Harshil Patel * style: Capitalize README * test(cellranger): Update pytest_modules * feat(cellranger): Add initial mkfastq module * ci: Update pytest modules * refactor(cellranger): Update modules to new syntax * docs(cellranger): Update meta files There is some terrible copy-pasting going on. 
* fix(cellranger): Add args Co-authored-by: Gisela Gabernet Co-authored-by: Harshil Patel --- modules/cellranger/.gitignore | 1 + modules/cellranger/Dockerfile | 2 +- modules/cellranger/{readme.md => README.md} | 0 modules/cellranger/count/main.nf | 49 +++++++++++++++ modules/cellranger/count/meta.yml | 40 +++++++++++++ modules/cellranger/mkfastq/main.nf | 31 ++++++++++ modules/cellranger/mkfastq/meta.yml | 38 ++++++++++++ modules/cellranger/mkgtf/main.nf | 31 ++++++++++ modules/cellranger/mkgtf/meta.yml | 31 ++++++++++ modules/cellranger/mkref/meta.yml | 60 +++++++++---------- tests/config/pytest_modules.yml | 18 +++++- tests/modules/cellranger/count/main.nf | 33 ++++++++++ .../modules/cellranger/count/nextflow.config | 31 ++++++++++ tests/modules/cellranger/count/test.yml | 19 ++++++ tests/modules/cellranger/mkfastq/main.nf | 26 ++++++++ .../cellranger/mkfastq/nextflow.config | 5 ++ tests/modules/cellranger/mkfastq/test.yml | 13 ++++ tests/modules/cellranger/mkgtf/main.nf | 11 ++++ .../modules/cellranger/mkgtf/nextflow.config | 27 +++++++++ tests/modules/cellranger/mkgtf/test.yml | 8 +++ 20 files changed, 441 insertions(+), 33 deletions(-) create mode 100644 modules/cellranger/.gitignore rename modules/cellranger/{readme.md => README.md} (100%) create mode 100644 modules/cellranger/count/main.nf create mode 100644 modules/cellranger/count/meta.yml create mode 100644 modules/cellranger/mkfastq/main.nf create mode 100644 modules/cellranger/mkfastq/meta.yml create mode 100644 modules/cellranger/mkgtf/main.nf create mode 100644 modules/cellranger/mkgtf/meta.yml create mode 100644 tests/modules/cellranger/count/main.nf create mode 100644 tests/modules/cellranger/count/nextflow.config create mode 100644 tests/modules/cellranger/count/test.yml create mode 100644 tests/modules/cellranger/mkfastq/main.nf create mode 100644 tests/modules/cellranger/mkfastq/nextflow.config create mode 100644 tests/modules/cellranger/mkfastq/test.yml create mode 100644 tests/modules/cellranger/mkgtf/main.nf create mode 100644 tests/modules/cellranger/mkgtf/nextflow.config create mode 100644 tests/modules/cellranger/mkgtf/test.yml diff --git a/modules/cellranger/.gitignore b/modules/cellranger/.gitignore new file mode 100644 index 00000000..9f8cb0f5 --- /dev/null +++ b/modules/cellranger/.gitignore @@ -0,0 +1 @@ +cellranger-*.tar.gz diff --git a/modules/cellranger/Dockerfile b/modules/cellranger/Dockerfile index aced4233..e9437bf6 100644 --- a/modules/cellranger/Dockerfile +++ b/modules/cellranger/Dockerfile @@ -4,7 +4,7 @@ LABEL authors="Gisela Gabernet " \ # Disclaimer: this container is not provided nor supported by 10x Genomics. # Install procps and clean apt cache -RUN apt-get update \ +RUN apt-get update --allow-releaseinfo-change \ && apt-get install -y procps \ && apt-get clean -y && rm -rf /var/lib/apt/lists/* diff --git a/modules/cellranger/readme.md b/modules/cellranger/README.md similarity index 100% rename from modules/cellranger/readme.md rename to modules/cellranger/README.md diff --git a/modules/cellranger/count/main.nf b/modules/cellranger/count/main.nf new file mode 100644 index 00000000..be3f512a --- /dev/null +++ b/modules/cellranger/count/main.nf @@ -0,0 +1,49 @@ +process CELLRANGER_COUNT { + tag "$meta.gem" + label 'process_high' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." 
+ } + container "nfcore/cellranger:6.0.2" + + input: + tuple val(meta), path(reads) + path reference + + output: + path("sample-${meta.gem}/outs/*"), emit: outs + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def sample_arg = meta.samples.unique().join(",") + def reference_name = reference.name + """ + cellranger \\ + count \\ + --id='sample-${meta.gem}' \\ + --fastqs=. \\ + --transcriptome=$reference_name \\ + --sample=$sample_arg \\ + --localcores=$task.cpus \\ + --localmem=${task.memory.toGiga()} \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ + + stub: + """ + mkdir -p "sample-${meta.gem}/outs/" + touch sample-${meta.gem}/outs/fake_file.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/cellranger/count/meta.yml b/modules/cellranger/count/meta.yml new file mode 100644 index 00000000..e4647c98 --- /dev/null +++ b/modules/cellranger/count/meta.yml @@ -0,0 +1,40 @@ +name: cellranger_count +description: Module to use Cell Ranger's pipelines analyze sequencing data produced from Chromium Single Cell Gene Expression. +keywords: + - align + - count + - reference +tools: + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. + homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - reference: + type: folder + description: Folder containing all the reference indices needed by Cell Ranger +output: + - outs: + type: file + description: Files containing the outputs of Cell Ranger + pattern: "sample-${meta.gem}/outs/*" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" diff --git a/modules/cellranger/mkfastq/main.nf b/modules/cellranger/mkfastq/main.nf new file mode 100644 index 00000000..14d68665 --- /dev/null +++ b/modules/cellranger/mkfastq/main.nf @@ -0,0 +1,31 @@ +process CELLRANGER_MKFASTQ { + tag "mkfastq" + label 'process_medium' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." 
+ } + container "litd/docker-cellranger:v6.1.1" // FIXME Add bcl2fastq to nf-core docker image + + input: + path bcl + path csv + + output: + path "versions.yml", emit: versions + path "*.fastq.gz" , emit: fastq + + script: + def args = task.ext.args ?: '' + """ + cellranger mkfastq --id=${bcl.getSimpleName()} \ + --run=$bcl \ + --csv=$csv + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/cellranger/mkfastq/meta.yml b/modules/cellranger/mkfastq/meta.yml new file mode 100644 index 00000000..e288fb8c --- /dev/null +++ b/modules/cellranger/mkfastq/meta.yml @@ -0,0 +1,38 @@ +name: cellranger_mkfastq +description: Module to create fastqs needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkfastq command. +keywords: + - reference + - mkfastq + - fastq + - illumina + - bcl2fastq +tools: + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. + homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - bcl: + type: file + description: Base call files + pattern: "*.bcl.bgzf" + - csv: + type: file + description: Sample sheet + pattern: "*.csv" +output: + - fastq: + type: file + description: Unaligned FastQ files + pattern: "*.fastq.gz" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" + - "@RHReynolds" diff --git a/modules/cellranger/mkgtf/main.nf b/modules/cellranger/mkgtf/main.nf new file mode 100644 index 00000000..4db274d7 --- /dev/null +++ b/modules/cellranger/mkgtf/main.nf @@ -0,0 +1,31 @@ +process CELLRANGER_MKGTF { + tag "$gtf" + label 'process_low' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." + } + container "nfcore/cellranger:6.0.2" + + input: + path gtf + + output: + path "*.filtered.gtf", emit: gtf + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + cellranger \\ + mkgtf \\ + $gtf \\ + ${gtf.baseName}.filtered.gtf \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/cellranger/mkgtf/meta.yml b/modules/cellranger/mkgtf/meta.yml new file mode 100644 index 00000000..c160072f --- /dev/null +++ b/modules/cellranger/mkgtf/meta.yml @@ -0,0 +1,31 @@ +name: cellranger_mkgtf +description: Module to build a filtered gtf needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkgtf command. 
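+# The wrapped command has the shape (sketch with a placeholder input name):
+#   cellranger mkgtf genes.gtf genes.filtered.gtf --attribute=gene_biotype:protein_coding
+# where any --attribute filters are supplied through ext.args, as in tests/modules/cellranger/mkgtf/nextflow.config.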
+keywords: + - reference + - mkref + - index +tools: + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. + homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - gtf: + type: file + description: + pattern: "*.gtf" +output: + - gtf: + type: folder + description: gtf transcriptome file + pattern: "*.filtered.gtf" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" diff --git a/modules/cellranger/mkref/meta.yml b/modules/cellranger/mkref/meta.yml index 9b849af7..06bf5b93 100644 --- a/modules/cellranger/mkref/meta.yml +++ b/modules/cellranger/mkref/meta.yml @@ -1,39 +1,37 @@ name: cellranger_mkref description: Module to build the reference needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkref command. keywords: - - reference - - mkref - - index + - reference + - mkref + - index tools: - - cellranger: - description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. - homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger - documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov - tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov - doi: "" - licence: 10x Genomics EULA - + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. 
+ homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA input: - - fasta: - type: file - description: fasta genome file - pattern: "*.{fasta,fa}" - - gtf: - type: file - description: gtf transcriptome file - pattern: "*.gtf" - - reference_name: - type: val - description: name to give the reference folder - pattern: str - + - fasta: + type: file + description: fasta genome file + pattern: "*.{fasta,fa}" + - gtf: + type: file + description: gtf transcriptome file + pattern: "*.gtf" + - reference_name: + type: val + description: name to give the reference folder + pattern: str output: - - versions: - type: file - description: File containing software version - pattern: "versions.yml" - - reference: - type: folder - description: Folder containing all the reference indices needed by Cell Ranger + - reference: + type: folder + description: Folder containing all the reference indices needed by Cell Ranger + - versions: + type: file + description: File containing software version + pattern: "versions.yml" authors: - "@ggabernet" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index aa59b7c9..85689d8b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -270,9 +270,25 @@ cat/fastq: - modules/cat/fastq/** - tests/modules/cat/fastq/** -cellranger/mkref: +cellranger/gtf: # &cellranger/gtf + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** + +cellranger/mkref: # &cellranger/mkref - modules/cellranger/mkref/** - tests/modules/cellranger/mkref/** + # - *cellranger/gtf + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** + +cellranger/count: + - modules/cellranger/count/** + - tests/modules/cellranger/count/** + # - *cellranger/mkref + - modules/cellranger/mkref/** + - tests/modules/cellranger/mkref/** + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** checkm/lineagewf: - modules/checkm/lineagewf/** diff --git a/tests/modules/cellranger/count/main.nf b/tests/modules/cellranger/count/main.nf new file mode 100644 index 00000000..bb9e11d1 --- /dev/null +++ b/tests/modules/cellranger/count/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CELLRANGER_MKGTF } from '../../../../modules/cellranger/mkgtf/main.nf' +include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' +include { CELLRANGER_COUNT } from '../../../../modules/cellranger/count/main.nf' + +workflow test_cellranger_count { + + input = [ [ id:'test', single_end:true, strandedness:'forward', gem: '123', samples: ["test_10x"] ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_10x_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_10x_2_fastq_gz'], checkIfExists: true) + ] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + reference_name = "homo_sapiens_chr22_reference" + + CELLRANGER_MKGTF ( gtf ) + + CELLRANGER_MKREF ( + fasta, + CELLRANGER_MKGTF.out.gtf, + reference_name + ) + + CELLRANGER_COUNT( + input, + CELLRANGER_MKREF.out.reference + ) 
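+    // A sketch of how the emitted channels could be inspected from this test workflow, if useful:
+    //   CELLRANGER_COUNT.out.outs.view { "outs: $it" }
+    //   CELLRANGER_COUNT.out.versions.view()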
+} diff --git a/tests/modules/cellranger/count/nextflow.config b/tests/modules/cellranger/count/nextflow.config new file mode 100644 index 00000000..16419fce --- /dev/null +++ b/tests/modules/cellranger/count/nextflow.config @@ -0,0 +1,31 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CELLRANGER_MKGTF { + ext.args = '--attribute=gene_biotype:protein_coding \ + --attribute=gene_biotype:lincRNA \ + --attribute=gene_biotype:antisense \ + --attribute=gene_biotype:IG_LV_gene \ + --attribute=gene_biotype:IG_V_gene \ + --attribute=gene_biotype:IG_V_pseudogene \ + --attribute=gene_biotype:IG_D_gene \ + --attribute=gene_biotype:IG_J_gene \ + --attribute=gene_biotype:IG_J_pseudogene \ + --attribute=gene_biotype:IG_C_gene \ + --attribute=gene_biotype:IG_C_pseudogene \ + --attribute=gene_biotype:TR_V_gene \ + --attribute=gene_biotype:TR_V_pseudogene \ + --attribute=gene_biotype:TR_D_gene \ + --attribute=gene_biotype:TR_J_gene \ + --attribute=gene_biotype:TR_J_pseudogene \ + --attribute=gene_biotype:TR_C_gene' + + + } + + withName: CELLRANGER_COUNT { + ext.args = '--chemistry SC3Pv3' + } + +} diff --git a/tests/modules/cellranger/count/test.yml b/tests/modules/cellranger/count/test.yml new file mode 100644 index 00000000..6b151a2a --- /dev/null +++ b/tests/modules/cellranger/count/test.yml @@ -0,0 +1,19 @@ +- name: cellranger count test_cellranger_count + command: nextflow run tests/modules/cellranger/count -entry test_cellranger_count -c tests/config/nextflow.config -c tests/modules/cellranger/count/nextflow.config + tags: + - cellranger + - cellranger/count + files: + - path: output/cellranger/sample-123/outs/filtered_feature_bc_matrix.h5 + - path: output/cellranger/sample-123/outs/metrics_summary.csv + md5sum: 707df0f101d479d93f412ca74f9c4131 + - path: output/cellranger/sample-123/outs/molecule_info.h5 + md5sum: cf03b2b3ca776a1c37aa3518e91268ba + - path: output/cellranger/sample-123/outs/possorted_genome_bam.bam + md5sum: 15441da9cfceea0bb48c8b66b1b860df + - path: output/cellranger/sample-123/outs/possorted_genome_bam.bam.bai + md5sum: 7c3d49c77016a09535aff61a027f750c + - path: output/cellranger/sample-123/outs/raw_feature_bc_matrix + - path: output/cellranger/sample-123/outs/raw_feature_bc_matrix.h5 + md5sum: 40c8df814eb8723b7317b234dc8222e9 + - path: output/cellranger/sample-123/outs/web_summary.html diff --git a/tests/modules/cellranger/mkfastq/main.nf b/tests/modules/cellranger/mkfastq/main.nf new file mode 100644 index 00000000..5e594fd1 --- /dev/null +++ b/tests/modules/cellranger/mkfastq/main.nf @@ -0,0 +1,26 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR } from '../../../../modules/untar/main.nf' +include { CELLRANGER_MKFASTQ } from '../../../../modules/cellranger/mkfastq/main.nf' + +workflow test_cellranger_mkfastq_simple { + + simple_csv = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-simple-1.2.0.csv", checkIfExists: true) + tiny_bcl = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) + + UNTAR ( tiny_bcl ) + + CELLRANGER_MKFASTQ ( UNTAR.out.untar, simple_csv) +} + +workflow test_cellranger_mkfastq_illumina { + + samplesheet_csv = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-samplesheet-1.2.0.csv", checkIfExists: true) + tiny_bcl = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) + + UNTAR ( tiny_bcl ) + + CELLRANGER_MKFASTQ ( 
UNTAR.out.untar, samplesheet_csv) +} diff --git a/tests/modules/cellranger/mkfastq/nextflow.config b/tests/modules/cellranger/mkfastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cellranger/mkfastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cellranger/mkfastq/test.yml b/tests/modules/cellranger/mkfastq/test.yml new file mode 100644 index 00000000..bdd32187 --- /dev/null +++ b/tests/modules/cellranger/mkfastq/test.yml @@ -0,0 +1,13 @@ +- name: cellranger mkfastq test_cellranger_mkfastq_simple + command: nextflow run tests/modules/cellranger/mkfastq -entry test_cellranger_mkfastq_simple -c tests/config/nextflow.config -c ./tests/modules/cellranger/mkfastq/nextflow.config + tags: + - cellranger + - cellranger/mkfastq + # files: + # - path: output/cellranger/genome.filtered.gtf + # md5sum: a8b8a7b5039e05d3a9cf9151ea138b5b +- name: cellranger mkfastq test_cellranger_mkfastq_illumina + command: nextflow run tests/modules/cellranger/mkfastq -entry test_cellranger_mkfastq_illumina -c tests/config/nextflow.config -c ./tests/modules/cellranger/mkfastq/nextflow.config + tags: + - cellranger + - cellranger/mkfastq diff --git a/tests/modules/cellranger/mkgtf/main.nf b/tests/modules/cellranger/mkgtf/main.nf new file mode 100644 index 00000000..19e2cba0 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/main.nf @@ -0,0 +1,11 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CELLRANGER_MKGTF } from '../../../../modules/cellranger/mkgtf/main.nf' + +workflow test_cellranger_mkgtf { + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + CELLRANGER_MKGTF ( gtf ) +} diff --git a/tests/modules/cellranger/mkgtf/nextflow.config b/tests/modules/cellranger/mkgtf/nextflow.config new file mode 100644 index 00000000..03fd9e09 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/nextflow.config @@ -0,0 +1,27 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CELLRANGER_MKGTF { + ext.args = '--attribute=gene_biotype:protein_coding \ + --attribute=gene_biotype:lincRNA \ + --attribute=gene_biotype:antisense \ + --attribute=gene_biotype:IG_LV_gene \ + --attribute=gene_biotype:IG_V_gene \ + --attribute=gene_biotype:IG_V_pseudogene \ + --attribute=gene_biotype:IG_D_gene \ + --attribute=gene_biotype:IG_J_gene \ + --attribute=gene_biotype:IG_J_pseudogene \ + --attribute=gene_biotype:IG_C_gene \ + --attribute=gene_biotype:IG_C_pseudogene \ + --attribute=gene_biotype:TR_V_gene \ + --attribute=gene_biotype:TR_V_pseudogene \ + --attribute=gene_biotype:TR_D_gene \ + --attribute=gene_biotype:TR_J_gene \ + --attribute=gene_biotype:TR_J_pseudogene \ + --attribute=gene_biotype:TR_C_gene' + + + } + +} diff --git a/tests/modules/cellranger/mkgtf/test.yml b/tests/modules/cellranger/mkgtf/test.yml new file mode 100644 index 00000000..2130afd2 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/test.yml @@ -0,0 +1,8 @@ +- name: cellranger mkgtf test_cellranger_mkgtf + command: nextflow run tests/modules/cellranger/mkgtf -entry test_cellranger_mkgtf -c tests/config/nextflow.config -c tests/modules/cellranger/mkgtf/nextflow.config + tags: + - cellranger + - cellranger/mkgtf + files: + - path: output/cellranger/genome.filtered.gtf + md5sum: a8b8a7b5039e05d3a9cf9151ea138b5b From cd94731789aa516631e5ea11e0f49469f2ba82dd Mon 
Sep 17 00:00:00 2001 From: tamuanand Date: Sun, 5 Dec 2021 16:45:09 -0500 Subject: [PATCH 281/314] Update meta.yml by fixing html pattern expected (#1113) Fixed html pattern typo: Before: pattern: "*.thml" After fix: pattern: "*.html" --- modules/fastp/meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/fastp/meta.yml b/modules/fastp/meta.yml index 6e133871..a1875faf 100644 --- a/modules/fastp/meta.yml +++ b/modules/fastp/meta.yml @@ -40,7 +40,7 @@ output: - html: type: file description: Results in HTML format - pattern: "*.thml" + pattern: "*.html" - log: type: file description: fastq log file From f3ffa69b8dcde337dce1782c15118323c7ea14d7 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Mon, 6 Dec 2021 08:56:41 +0000 Subject: [PATCH 282/314] Dragmap (#1108) * feat(dragmap): Add initial hastable module * feat(dragmap): Add initial align module * test(dragmap): Remove md5sum Forgot sam files have a header. Might pipe this through samtools. * build(dragmap): Add mulled container * chore(dragmap): Update prefix * feat(dragmap): Output a bam file * feat(dragmap): Add log files * Update modules/dragmap/align/meta.yml Co-authored-by: Jose Espinosa-Carrasco --- modules/dragmap/align/main.nf | 59 +++++++++++++++++++ modules/dragmap/align/meta.yml | 42 +++++++++++++ modules/dragmap/hashtable/main.nf | 33 +++++++++++ modules/dragmap/hashtable/meta.yml | 30 ++++++++++ tests/config/pytest_modules.yml | 8 +++ tests/modules/dragmap/align/main.nf | 33 +++++++++++ tests/modules/dragmap/align/nextflow.config | 5 ++ tests/modules/dragmap/align/test.yml | 17 ++++++ tests/modules/dragmap/hashtable/main.nf | 15 +++++ .../modules/dragmap/hashtable/nextflow.config | 5 ++ tests/modules/dragmap/hashtable/test.yml | 19 ++++++ 11 files changed, 266 insertions(+) create mode 100644 modules/dragmap/align/main.nf create mode 100644 modules/dragmap/align/meta.yml create mode 100644 modules/dragmap/hashtable/main.nf create mode 100644 modules/dragmap/hashtable/meta.yml create mode 100644 tests/modules/dragmap/align/main.nf create mode 100644 tests/modules/dragmap/align/nextflow.config create mode 100644 tests/modules/dragmap/align/test.yml create mode 100644 tests/modules/dragmap/hashtable/main.nf create mode 100644 tests/modules/dragmap/hashtable/nextflow.config create mode 100644 tests/modules/dragmap/hashtable/test.yml diff --git a/modules/dragmap/align/main.nf b/modules/dragmap/align/main.nf new file mode 100644 index 00000000..f6d6877e --- /dev/null +++ b/modules/dragmap/align/main.nf @@ -0,0 +1,59 @@ +process DRAGMAP_ALIGN { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::dragmap=1.2.1 bioconda::samtools=1.14 conda-forge::pigz=2.3.4" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:f7aad9060cde739c95685fc5ff6d6f7e3ec629c8-0': + 'quay.io/biocontainers/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:f7aad9060cde739c95685fc5ff6d6f7e3ec629c8-0' }" + + input: + tuple val(meta), path(reads) + path hashmap + + output: + tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path('*.log'), emit: log + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if (meta.single_end) { + """ + dragen-os \\ + -r $hashmap \\ + -1 $reads \\ + --num-threads $task.cpus \\ + $args \\ + 2> ${prefix}.dragmap.log \\ + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ + } else { + """ + dragen-os \\ + -r $hashmap \\ + -1 ${reads[0]} \\ + -2 ${reads[1]} \\ + --num-threads $task.cpus \\ + $args \\ + 2> ${prefix}.dragmap.log \\ + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ + } +} diff --git a/modules/dragmap/align/meta.yml b/modules/dragmap/align/meta.yml new file mode 100644 index 00000000..e943ccf8 --- /dev/null +++ b/modules/dragmap/align/meta.yml @@ -0,0 +1,42 @@ +name: dragmap_align +description: Performs fastq alignment to a reference using DRAGMAP +keywords: + - alignment + - map + - fastq + - bam + - sam +tools: + - dragmap: + description: Dragmap is the Dragen mapper/aligner Open Source Software. + homepage: https://github.com/Illumina/dragmap + documentation: https://github.com/Illumina/dragmap + tool_dev_url: https://github.com/Illumina/dragmap#basic-command-line-usage + doi: "" + licence: ['GPL v3'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - hashmap: + type: file + description: DRAGMAP hash table + pattern: "Directory containing DRAGMAP hash table *.{cmp,.bin,.txt}" +output: + - bam: + type: file + description: Output BAM file containing read alignments + pattern: "*.{bam}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@Emiller88" diff --git a/modules/dragmap/hashtable/main.nf b/modules/dragmap/hashtable/main.nf new file mode 100644 index 00000000..ab55364b --- /dev/null +++ b/modules/dragmap/hashtable/main.nf @@ -0,0 +1,33 @@ +process DRAGMAP_HASHTABLE { + tag "$fasta" + label 'process_high' + + conda (params.enable_conda ? "bioconda::dragmap=1.2.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/dragmap:1.2.1--hd4ca14e_0': + 'quay.io/biocontainers/dragmap:1.2.1--hd4ca14e_0' }" + + input: + path fasta + + output: + path "dragmap" , emit: hashmap + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + mkdir dragmap + dragen-os \\ + --build-hash-table true \\ + --ht-reference $fasta \\ + --output-directory dragmap \\ + $args \\ + --ht-num-threads $task.cpus + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + END_VERSIONS + """ +} diff --git a/modules/dragmap/hashtable/meta.yml b/modules/dragmap/hashtable/meta.yml new file mode 100644 index 00000000..86e58789 --- /dev/null +++ b/modules/dragmap/hashtable/meta.yml @@ -0,0 +1,30 @@ +name: dragmap_hashtable +description: Create DRAGEN hashtable for reference genome +keywords: + - index + - fasta + - genome + - reference +tools: + - dragmap: + description: Dragmap is the Dragen mapper/aligner Open Source Software. + homepage: https://github.com/Illumina/dragmap + documentation: https://github.com/Illumina/dragmap + tool_dev_url: https://github.com/Illumina/dragmap#basic-command-line-usage + doi: "" + licence: ['GPL v3'] +input: + - fasta: + type: file + description: Input genome fasta file +output: + - hashmap: + type: file + description: DRAGMAP hash table + pattern: "*.{cmp,.bin,.txt}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@Emiller88" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 85689d8b..bbe89840 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -402,6 +402,14 @@ diamond/makedb: - modules/diamond/makedb/** - tests/modules/diamond/makedb/** +dragmap/align: + - modules/dragmap/align/** + - tests/modules/dragmap/align/** + +dragmap/hashtable: + - modules/dragmap/hashtable/** + - tests/modules/dragmap/hashtable/** + dragonflye: - modules/dragonflye/** - tests/modules/dragonflye/** diff --git a/tests/modules/dragmap/align/main.nf b/tests/modules/dragmap/align/main.nf new file mode 100644 index 00000000..92e8c265 --- /dev/null +++ b/tests/modules/dragmap/align/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DRAGMAP_HASHTABLE } from '../../../../modules/dragmap/hashtable/main.nf' +include { DRAGMAP_ALIGN } from '../../../../modules/dragmap/align/main.nf' + +workflow test_dragmap_align_single_end { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap ) +} + +workflow test_dragmap_align_paired_end { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap ) +} diff --git a/tests/modules/dragmap/align/nextflow.config b/tests/modules/dragmap/align/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ 
b/tests/modules/dragmap/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/dragmap/align/test.yml b/tests/modules/dragmap/align/test.yml new file mode 100644 index 00000000..75c5ea96 --- /dev/null +++ b/tests/modules/dragmap/align/test.yml @@ -0,0 +1,17 @@ +- name: dragmap align single-end + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log + +- name: dragmap align paired-end + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log diff --git a/tests/modules/dragmap/hashtable/main.nf b/tests/modules/dragmap/hashtable/main.nf new file mode 100644 index 00000000..91b43caa --- /dev/null +++ b/tests/modules/dragmap/hashtable/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DRAGMAP_HASHTABLE } from '../../../../modules/dragmap/hashtable/main.nf' + +workflow test_dragmap_hashtable { + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) +} + +// TODO Add test using alt-masked bed file +// https://github.com/Illumina/dragmap#build-hash-table-using-an-alt-masked-bed-file diff --git a/tests/modules/dragmap/hashtable/nextflow.config b/tests/modules/dragmap/hashtable/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/dragmap/hashtable/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/dragmap/hashtable/test.yml b/tests/modules/dragmap/hashtable/test.yml new file mode 100644 index 00000000..59a3ed55 --- /dev/null +++ b/tests/modules/dragmap/hashtable/test.yml @@ -0,0 +1,19 @@ +- name: dragmap hashtable + command: nextflow run ./tests/modules/dragmap/hashtable -entry test_dragmap_hashtable -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/hashtable/nextflow.config + tags: + - dragmap + - dragmap/hashtable + files: + - path: output/dragmap/dragmap/hash_table.cfg + - path: output/dragmap/dragmap/hash_table.cfg.bin + - path: output/dragmap/dragmap/hash_table.cmp + md5sum: bc210e5358fd65656f9aea297b59ec7d + - path: output/dragmap/dragmap/hash_table_stats.txt + - path: output/dragmap/dragmap/reference.bin + md5sum: b6b5c12a42416b990cd2844de8f33c5d + - path: output/dragmap/dragmap/ref_index.bin + md5sum: 8470be9566ecee77eb4aea6a38922a66 + - path: output/dragmap/dragmap/repeat_mask.bin + md5sum: 2439259a2fd32a1d0f4c53d585f3da3a + - path: output/dragmap/dragmap/str_table.bin + md5sum: 302e2b30993973527e69c6bcd1f093d0 From e0aa89141ffecb5f54d230f7ea46de242b74e084 Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Mon, 6 Dec 2021 11:37:04 +0100 Subject: [PATCH 283/314] Add meta information to samtools/faidx (#1114) * add meta to samtools/faidx --- modules/samtools/faidx/main.nf | 6 +++--- modules/samtools/faidx/meta.yml | 10 ++++++++++ 
tests/modules/samtools/faidx/main.nf | 6 ++++-- tests/modules/samtools/faidx/test.yml | 6 ++++-- 4 files changed, 21 insertions(+), 7 deletions(-) diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index c53373a9..d8308b03 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -8,11 +8,11 @@ process SAMTOOLS_FAIDX { 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: - path fasta + tuple val(meta), path(fasta) output: - path "*.fai" , emit: fai - path "versions.yml", emit: versions + tuple val(meta), path ("*.fai") , emit: fai + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' diff --git a/modules/samtools/faidx/meta.yml b/modules/samtools/faidx/meta.yml index 16c0b334..bae97a39 100644 --- a/modules/samtools/faidx/meta.yml +++ b/modules/samtools/faidx/meta.yml @@ -14,11 +14,21 @@ tools: doi: 10.1093/bioinformatics/btp352 licence: ['MIT'] input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - fasta: type: file description: FASTA file pattern: "*.{fa,fasta}" output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - fai: type: file description: FASTA index file diff --git a/tests/modules/samtools/faidx/main.nf b/tests/modules/samtools/faidx/main.nf index bc47c847..bc4dc5e3 100644 --- a/tests/modules/samtools/faidx/main.nf +++ b/tests/modules/samtools/faidx/main.nf @@ -5,7 +5,9 @@ nextflow.enable.dsl = 2 include { SAMTOOLS_FAIDX } from '../../../../modules/samtools/faidx/main.nf' workflow test_samtools_faidx { - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - SAMTOOLS_FAIDX ( fasta ) + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + SAMTOOLS_FAIDX ( input ) } diff --git a/tests/modules/samtools/faidx/test.yml b/tests/modules/samtools/faidx/test.yml index f0224f34..dc2184ee 100644 --- a/tests/modules/samtools/faidx/test.yml +++ b/tests/modules/samtools/faidx/test.yml @@ -1,8 +1,10 @@ -- name: samtools faidx test workflow - command: nextflow run ./tests/modules/samtools/faidx -entry test_samtools_faidx -c ./tests/config/nextflow.config -c ./tests/modules/samtools/faidx/nextflow.config +- name: samtools faidx test_samtools_faidx + command: nextflow run tests/modules/samtools/faidx -entry test_samtools_faidx -c tests/config/nextflow.config tags: - samtools - samtools/faidx files: - path: output/samtools/genome.fasta.fai md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5 + - path: output/samtools/versions.yml + md5sum: d56671a7c8f8058944d3d536c3058f7f From 98b024c0e46ef0ea994cd2cba408f0043e7e4dcf Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Mon, 6 Dec 2021 12:59:49 +0100 Subject: [PATCH 284/314] Fix syntax for extra containers for ensemblvep and snpeff (#1105) * fix: correct syntax for task.ext.use_cache * Apply suggestions from code review * fix: simplify logic * fix: update to new syntax --- modules/ensemblvep/main.nf | 14 ++++++-------- modules/snpeff/main.nf | 14 ++++++-------- tests/modules/ensemblvep/nextflow.config | 3 +-- tests/modules/snpeff/nextflow.config | 3 +-- 4 files changed, 14 insertions(+), 20 deletions(-) diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 3182feb2..9caffb0c 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -2,13 +2,11 @@ process ENSEMBLVEP { label 'process_medium' conda (params.enable_conda ? "bioconda::ensembl-vep=104.3" : null) - if (task.ext.use_cache) { - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + !task.ext.container_tag ? 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : - 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" - } else { - container "nfcore/vep:${task.ext.vep_tag}" - } + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' : + "nfcore/vep:${task.ext.container_tag}" }" input: tuple val(meta), path(vcf) @@ -25,7 +23,7 @@ process ENSEMBLVEP { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def dir_cache = task.ext.use_cache ? "\${PWD}/${cache}" : "/.vep" + def dir_cache = cache ? "\${PWD}/${cache}" : "/.vep" """ mkdir $prefix @@ -39,7 +37,7 @@ process ENSEMBLVEP { --cache_version $cache_version \\ --dir_cache $dir_cache \\ --fork $task.cpus \\ - --format vcf \\ + --vcf \\ --stats_file ${prefix}.summary.html rm -rf $prefix diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index d0ec993e..db9cca72 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -2,13 +2,11 @@ process SNPEFF { label 'process_medium' conda (params.enable_conda ? "bioconda::snpeff=5.0" : null) - if (task.ext.use_cache) { - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + !task.ext.container_tag ? 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : - 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" - } else { - container "nfcore/snpeff:${task.ext.snpeff_tag}" - } + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' : + "nfcore/snpeff:${task.ext.container_tag}" }" input: tuple val(meta), path(vcf) @@ -29,14 +27,14 @@ process SNPEFF { avail_mem = task.memory.giga } def prefix = task.ext.prefix ?: "${meta.id}" - def dir_cache = task.ext.use_cache ? "-dataDir \${PWD}/${cache}" : "" + def cache_command = cache ? 
"-dataDir \${PWD}/${cache}" : "" """ snpEff \\ -Xmx${avail_mem}g \\ $db \\ $args \\ -csvStats ${prefix}.csv \\ - $dir_cache \\ + $cache_command \\ $vcf \\ > ${prefix}.ann.vcf diff --git a/tests/modules/ensemblvep/nextflow.config b/tests/modules/ensemblvep/nextflow.config index bcca2d06..717fcae9 100644 --- a/tests/modules/ensemblvep/nextflow.config +++ b/tests/modules/ensemblvep/nextflow.config @@ -3,8 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: ENSEMBLVEP { - ext.vep_tag = '104.3.WBcel235' - ext.use_cache = false + ext.container_tag = '104.3.WBcel235' } } diff --git a/tests/modules/snpeff/nextflow.config b/tests/modules/snpeff/nextflow.config index 589c8cfb..3b094eed 100644 --- a/tests/modules/snpeff/nextflow.config +++ b/tests/modules/snpeff/nextflow.config @@ -3,8 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SNPEFF { - ext.snpeff_tag = '5.0.WBcel235' - ext.use_cache = false + ext.container_tag = '5.0.WBcel235' } } From e22966ce74340cb671576143e5fdbbd71670cffa Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Tue, 7 Dec 2021 10:12:58 +0100 Subject: [PATCH 285/314] feat: emited channel should be gz_tbi and not only tbi (#1118) --- modules/tabix/bgziptabix/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 20b47a9f..e419d153 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -11,7 +11,7 @@ process TABIX_BGZIPTABIX { tuple val(meta), path(input) output: - tuple val(meta), path("*.gz"), path("*.tbi"), emit: tbi + tuple val(meta), path("*.gz"), path("*.tbi"), emit: gz_tbi path "versions.yml" , emit: versions script: From 7006699ff8e4351e4c95d548de959d4222c7862a Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Tue, 7 Dec 2021 10:22:32 +0100 Subject: [PATCH 286/314] Update version & prefix (#1120) * Update version & prefix * Fix indentation --- modules/seqkit/split2/main.nf | 25 ++++----- tests/modules/seqkit/split2/test.yml | 78 ++++++++++++++++------------ 2 files changed, 58 insertions(+), 45 deletions(-) diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index fc027793..7e361a06 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -2,29 +2,30 @@ process SEQKIT_SPLIT2 { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::seqkit=0.16.1' : null) + conda (params.enable_conda ? 'bioconda::seqkit=2.1.0' : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0' : - 'quay.io/biocontainers/seqkit:0.16.1--h9ee0642_0' }" + 'https://depot.galaxyproject.org/singularity/seqkit:2.1.0--h9ee0642_0' : + 'quay.io/biocontainers/seqkit:2.1.0--h9ee0642_0' }" input: tuple val(meta), path(reads) output: - tuple val(meta), path("*${prefix}/*.gz"), emit: reads - path "versions.yml" , emit: versions + tuple val(meta), path("**/*.gz"), emit: reads + path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if(meta.single_end){ """ seqkit \\ split2 \\ $args \\ --threads $task.cpus \\ - -1 $reads \\ - --out-dir $prefix + $reads \\ + --out-dir ${prefix} cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -37,9 +38,9 @@ process SEQKIT_SPLIT2 { split2 \\ $args \\ --threads $task.cpus \\ - -1 ${reads[0]} \\ - -2 ${reads[1]} \\ - --out-dir $prefix + --read1 ${reads[0]} \\ + --read2 ${reads[1]} \\ + --out-dir ${prefix} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/modules/seqkit/split2/test.yml b/tests/modules/seqkit/split2/test.yml index 12b02072..00368e22 100644 --- a/tests/modules/seqkit/split2/test.yml +++ b/tests/modules/seqkit/split2/test.yml @@ -1,83 +1,95 @@ -- name: seqkit split2 single-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_single_end_length + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: 6f7d58ba35c254c0817fe9a7c69862e4 + md5sum: 7f489b2374c5fcc155a60ce2365a7bb7 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: cf38c51506e45380fe25abdd1bd5ccc6 + md5sum: 45cccacb4676bca33beb17064322a781 + - path: output/seqkit/versions.yml + md5sum: 2d5a709d129be364687cc0b561efa532 -- name: seqkit split2 single-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_single_end_size + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: bf835e685d597fc1ab5e5ac7dd689619 + md5sum: b09324606fb3636b51448d6a007d2c71 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 + md5sum: f7873475d463e3b4d21dccbf8e859270 + - path: output/seqkit/versions.yml + md5sum: 490d00accd1092a8eca4e83ed809bad3 -- name: seqkit split2 single-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_single_end_part + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: fa25951435471238d5567fd2cae31f55 + md5sum: a9d29d08e27246b6d36e21e5def405e3 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 1dcf631aaaa5e7e0bd6c9668fbc6e04a + md5sum: 6d547a959adcd027dd1a8734e195dd7d - path: output/seqkit/test/test_1.part_003.fastq.gz - md5sum: 8bc86ba83a611c54f592f4eae19b680f + md5sum: 6d63cc8400dd2a96d808514fb18278ee + - path: output/seqkit/versions.yml + md5sum: 90431cd3d28954f656988230d4481115 -- name: seqkit split2 paired-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c ./tests/config/nextflow.config -c 
./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_length + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: 6f7d58ba35c254c0817fe9a7c69862e4 + md5sum: 7f489b2374c5fcc155a60ce2365a7bb7 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: cf38c51506e45380fe25abdd1bd5ccc6 + md5sum: 45cccacb4676bca33beb17064322a781 - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: 6b094b1ba7c439fe44c1bb5e99a02ba4 + md5sum: 160b5fd363ff7cad8af9d914269d6426 - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 927097c6ac7522199a9e016333181a8e + md5sum: 18bc5434cf55706394cccb44e6108561 + - path: output/seqkit/versions.yml + md5sum: 9272afc1a126ae997a712edeef317f22 -- name: seqkit split2 paired-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_size + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: bf835e685d597fc1ab5e5ac7dd689619 + md5sum: b09324606fb3636b51448d6a007d2c71 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 + md5sum: f7873475d463e3b4d21dccbf8e859270 - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: 09d0dd83b5b1b9b95d316eeed79ea5ba + md5sum: c0602b62aae860dd284c0eb0062c24dd - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 8796c3f327b1094244bfcdb36d536526 + md5sum: 5bc7a98b618100b29910eb41c4c9ac0d + - path: output/seqkit/versions.yml + md5sum: af66912ae8abc493f77f70e3bf473144 -- name: seqkit split2 paired-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_part + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: fa25951435471238d5567fd2cae31f55 + md5sum: a9d29d08e27246b6d36e21e5def405e3 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 1dcf631aaaa5e7e0bd6c9668fbc6e04a + md5sum: 6d547a959adcd027dd1a8734e195dd7d - path: output/seqkit/test/test_1.part_003.fastq.gz - md5sum: 8bc86ba83a611c54f592f4eae19b680f + md5sum: 6d63cc8400dd2a96d808514fb18278ee - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: f0055c99cd193fd97466b3cde9dd1b8f + md5sum: b51a1bed106e4ec0c9be7d9e224d0616 - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 8a90df768201785f7a7cd5dbb41e846a + md5sum: 079078a7f86114ae29cda8c00d5a7fc9 - path: output/seqkit/test/test_2.part_003.fastq.gz - md5sum: 890b90083e8e1606bd13ba34149cedd7 + md5sum: 6987941bf8c4a37565e333029ba41ca0 + - path: output/seqkit/versions.yml + md5sum: 193bc5f0c429076f816ab0a529c4c1fc From 6510a7ff4f339625ebb89055bd39b9063797bb63 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Tue, 7 Dec 2021 10:30:35 +0100 Subject: [PATCH 287/314] feat: add meta.id tag (#1116) * feat: add meat.id tag * fix: actually call the right container for singularity --- modules/ensemblvep/main.nf | 9 +++++---- modules/snpeff/main.nf | 9 +++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 9caffb0c..78f2712c 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -1,12 +1,13 @@ process ENSEMBLVEP { + tag "$meta.id" label 'process_medium' conda (params.enable_conda ? "bioconda::ensembl-vep=104.3" : null) - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - !task.ext.container_tag ? + container "${ task.ext.container_tag ? + "nfcore/vep:${task.ext.container_tag}" : + workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : - 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' : - "nfcore/vep:${task.ext.container_tag}" }" + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" input: tuple val(meta), path(vcf) diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index db9cca72..9847c513 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -1,12 +1,13 @@ process SNPEFF { + tag "$meta.id" label 'process_medium' conda (params.enable_conda ? "bioconda::snpeff=5.0" : null) - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - !task.ext.container_tag ? + container "${ task.ext.container_tag ? + "nfcore/snpeff:${task.ext.container_tag}" : + workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : - 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' : - "nfcore/snpeff:${task.ext.container_tag}" }" + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" input: tuple val(meta), path(vcf) From 3b366c7c6aac446c1a4ea7c2016092344633b2ec Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Tue, 7 Dec 2021 11:18:12 +0100 Subject: [PATCH 288/314] greatly simplify syntax (#1121) --- modules/ensemblvep/main.nf | 8 +++----- modules/snpeff/main.nf | 8 +++----- tests/modules/ensemblvep/nextflow.config | 2 +- tests/modules/snpeff/nextflow.config | 2 +- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 78f2712c..e3d0c286 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -3,11 +3,9 @@ process ENSEMBLVEP { label 'process_medium' conda (params.enable_conda ? "bioconda::ensembl-vep=104.3" : null) - container "${ task.ext.container_tag ? - "nfcore/vep:${task.ext.container_tag}" : - workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : - 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" input: tuple val(meta), path(vcf) diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index 9847c513..6248fee3 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -3,11 +3,9 @@ process SNPEFF { label 'process_medium' conda (params.enable_conda ? "bioconda::snpeff=5.0" : null) - container "${ task.ext.container_tag ? - "nfcore/snpeff:${task.ext.container_tag}" : - workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : - 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" input: tuple val(meta), path(vcf) diff --git a/tests/modules/ensemblvep/nextflow.config b/tests/modules/ensemblvep/nextflow.config index 717fcae9..f13d62e9 100644 --- a/tests/modules/ensemblvep/nextflow.config +++ b/tests/modules/ensemblvep/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: ENSEMBLVEP { - ext.container_tag = '104.3.WBcel235' + container = 'nfcore/vep:104.3.WBcel235' } } diff --git a/tests/modules/snpeff/nextflow.config b/tests/modules/snpeff/nextflow.config index 3b094eed..f4042ab9 100644 --- a/tests/modules/snpeff/nextflow.config +++ b/tests/modules/snpeff/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SNPEFF { - ext.container_tag = '5.0.WBcel235' + container = 'nfcore/snpeff:5.0.WBcel235' } } From d473a247d2e0c619b0df877ea19d9a5a98c8e3c8 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Tue, 7 Dec 2021 15:00:43 +0100 Subject: [PATCH 289/314] Replace remaining task.ext.suffix with task.ext.prefix (#1117) * Replace remaining task.ext.suffix with task.ext.prefix --- modules/artic/minion/main.nf | 4 ++-- modules/bakta/main.nf | 6 +++--- modules/bcftools/concat/main.nf | 4 ++-- modules/bcftools/isec/main.nf | 4 ++-- modules/bcftools/merge/main.nf | 4 ++-- modules/bedtools/getfasta/main.nf | 4 ++-- modules/checkm/lineagewf/main.nf | 4 ++-- modules/csvtk/concat/main.nf | 4 ++-- modules/damageprofiler/main.nf | 4 ++-- modules/dedup/main.nf | 4 ++-- modules/fargene/main.nf | 4 ++-- modules/gatk4/genomicsdbimport/main.nf | 4 ++-- modules/gffread/main.nf | 4 ++-- modules/leehom/main.nf | 4 ++-- modules/msisensor/msi/main.nf | 4 ++-- modules/nextclade/main.nf | 4 ++-- modules/optitype/main.nf | 6 +++--- modules/plasmidid/main.nf | 4 ++-- modules/prodigal/main.nf | 4 ++-- modules/prokka/main.nf | 4 ++-- modules/qualimap/bamqc/main.nf | 4 ++-- modules/qualimap/rnaseq/main.nf | 4 ++-- modules/quast/main.nf | 4 ++-- modules/rsem/calculateexpression/main.nf | 4 ++-- modules/salmon/quant/main.nf | 4 ++-- modules/samtools/merge/main.nf | 4 ++-- modules/seqkit/split2/main.nf | 1 - modules/seqtk/subseq/main.nf | 4 ++-- modules/tbprofiler/profile/main.nf | 4 ++-- tests/modules/gffread/nextflow.config | 2 +- tests/modules/seqtk/subseq/nextflow.config | 2 +- 31 files changed, 60 insertions(+), 61 deletions(-) diff --git a/modules/artic/minion/main.nf b/modules/artic/minion/main.nf index 86863f95..ce04fcc8 
100644 --- a/modules/artic/minion/main.nf +++ b/modules/artic/minion/main.nf @@ -32,8 +32,8 @@ process ARTIC_MINION { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def version = scheme_version.toString().toLowerCase().replaceAll('v','') def fast5 = fast5_dir ? "--fast5-directory $fast5_dir" : "" def summary = sequencing_summary ? "--sequencing-summary $sequencing_summary" : "" diff --git a/modules/bakta/main.nf b/modules/bakta/main.nf index 20127e53..2582dac2 100644 --- a/modules/bakta/main.nf +++ b/modules/bakta/main.nf @@ -26,8 +26,8 @@ process BAKTA { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? "--prodigal-tf ${prodigal_tf[0]}" : "" """ @@ -47,7 +47,7 @@ process BAKTA { """ stub: - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + prefix = task.ext.prefix ?: "${meta.id}" """ touch ${prefix}.embl touch ${prefix}.faa diff --git a/modules/bcftools/concat/main.nf b/modules/bcftools/concat/main.nf index dbd9d9dc..cebd2443 100644 --- a/modules/bcftools/concat/main.nf +++ b/modules/bcftools/concat/main.nf @@ -15,8 +15,8 @@ process BCFTOOLS_CONCAT { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools concat \\ --output ${prefix}.vcf.gz \\ diff --git a/modules/bcftools/isec/main.nf b/modules/bcftools/isec/main.nf index c4eab09d..08323f28 100644 --- a/modules/bcftools/isec/main.nf +++ b/modules/bcftools/isec/main.nf @@ -15,8 +15,8 @@ process BCFTOOLS_ISEC { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools isec \\ $args \\ diff --git a/modules/bcftools/merge/main.nf b/modules/bcftools/merge/main.nf index 32ad760c..bfb0f162 100644 --- a/modules/bcftools/merge/main.nf +++ b/modules/bcftools/merge/main.nf @@ -15,8 +15,8 @@ process BCFTOOLS_MERGE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools merge -Oz \\ --output ${prefix}.vcf.gz \\ diff --git a/modules/bedtools/getfasta/main.nf b/modules/bedtools/getfasta/main.nf index c4dae429..5a283e94 100644 --- a/modules/bedtools/getfasta/main.nf +++ b/modules/bedtools/getfasta/main.nf @@ -16,8 +16,8 @@ process BEDTOOLS_GETFASTA { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${bed.baseName}${task.ext.suffix}" : "${bed.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${bed.baseName}" """ bedtools \\ getfasta \\ diff --git a/modules/checkm/lineagewf/main.nf b/modules/checkm/lineagewf/main.nf index 119ee491..992b165e 100644 --- a/modules/checkm/lineagewf/main.nf +++ b/modules/checkm/lineagewf/main.nf @@ -17,8 +17,8 @@ process CHECKM_LINEAGEWF { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ checkm \\ lineage_wf \\ diff --git a/modules/csvtk/concat/main.nf b/modules/csvtk/concat/main.nf index 745a9ac4..94b1925a 100644 --- a/modules/csvtk/concat/main.nf +++ b/modules/csvtk/concat/main.nf @@ -17,8 +17,8 @@ process CSVTK_CONCAT { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def delimiter = in_format == "tsv" ? "\t" : (in_format == "csv" ? "," : in_format) def out_delimiter = out_format == "tsv" ? "\t" : (out_format == "csv" ? "," : out_format) out_extension = out_format == "tsv" ? 'tsv' : 'csv' diff --git a/modules/damageprofiler/main.nf b/modules/damageprofiler/main.nf index da37909e..23eb9397 100644 --- a/modules/damageprofiler/main.nf +++ b/modules/damageprofiler/main.nf @@ -18,8 +18,8 @@ process DAMAGEPROFILER { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "-r $fasta" : "" def species_list = specieslist ? "-sf $specieslist" : "" """ diff --git a/modules/dedup/main.nf b/modules/dedup/main.nf index 60fc376e..8b4bdc37 100644 --- a/modules/dedup/main.nf +++ b/modules/dedup/main.nf @@ -18,8 +18,8 @@ process DEDUP { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ dedup \\ diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf index ac3f8338..73bdd411 100644 --- a/modules/fargene/main.nf +++ b/modules/fargene/main.nf @@ -32,8 +32,8 @@ process FARGENE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ fargene \\ $args \\ diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index 110dbf4f..e794aa5a 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -20,8 +20,8 @@ process GATK4_GENOMICSDBIMPORT { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" // settings for running default create gendb mode inputs_command = input_map ? 
"--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V ')}" diff --git a/modules/gffread/main.nf b/modules/gffread/main.nf index d31f76f8..e7893f8b 100644 --- a/modules/gffread/main.nf +++ b/modules/gffread/main.nf @@ -15,8 +15,8 @@ process GFFREAD { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${gff.baseName}${task.ext.suffix}" : "${gff.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${gff.baseName}" """ gffread \\ $gff \\ diff --git a/modules/leehom/main.nf b/modules/leehom/main.nf index d997e68b..b5cb2dcb 100644 --- a/modules/leehom/main.nf +++ b/modules/leehom/main.nf @@ -24,8 +24,8 @@ process LEEHOM { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" if (reads.toString().endsWith('.bam')) { """ diff --git a/modules/msisensor/msi/main.nf b/modules/msisensor/msi/main.nf index 1eb510a1..398b34a6 100644 --- a/modules/msisensor/msi/main.nf +++ b/modules/msisensor/msi/main.nf @@ -18,8 +18,8 @@ process MSISENSOR_MSI { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ msisensor \\ msi \\ diff --git a/modules/nextclade/main.nf b/modules/nextclade/main.nf index 317d393d..f60af57b 100755 --- a/modules/nextclade/main.nf +++ b/modules/nextclade/main.nf @@ -19,8 +19,8 @@ process NEXTCLADE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ nextclade \\ $args \\ diff --git a/modules/optitype/main.nf b/modules/optitype/main.nf index 24be66a7..d27f7f9f 100644 --- a/modules/optitype/main.nf +++ b/modules/optitype/main.nf @@ -15,9 +15,9 @@ process OPTITYPE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - def args2 = task.ext.args2 ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ # Create a config for OptiType on a per sample basis with task.ext.args2 diff --git a/modules/plasmidid/main.nf b/modules/plasmidid/main.nf index 290ae549..7404a678 100644 --- a/modules/plasmidid/main.nf +++ b/modules/plasmidid/main.nf @@ -23,8 +23,8 @@ process PLASMIDID { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ plasmidID \\ -d $fasta \\ diff --git a/modules/prodigal/main.nf b/modules/prodigal/main.nf index b09da13c..184b17bb 100644 --- a/modules/prodigal/main.nf +++ b/modules/prodigal/main.nf @@ -19,8 +19,8 @@ process PRODIGAL { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ prodigal -i "${genome}" \\ $args \\ diff --git a/modules/prokka/main.nf b/modules/prokka/main.nf index 8fae6367..551a17b9 100644 --- a/modules/prokka/main.nf +++ b/modules/prokka/main.nf @@ -28,8 +28,8 @@ process PROKKA { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? "--prodigaltf ${prodigal_tf[0]}" : "" """ diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index a47fde7e..973fd6a4 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -17,8 +17,8 @@ process QUALIMAP_BAMQC { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def collect_pairs = meta.single_end ? '' : '--collect-overlap-pairs' def memory = task.memory.toGiga() + "G" diff --git a/modules/qualimap/rnaseq/main.nf b/modules/qualimap/rnaseq/main.nf index 459f3da5..d83fcd99 100644 --- a/modules/qualimap/rnaseq/main.nf +++ b/modules/qualimap/rnaseq/main.nf @@ -16,8 +16,8 @@ process QUALIMAP_RNASEQ { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? '' : '-pe' def memory = task.memory.toGiga() + "G" diff --git a/modules/quast/main.nf b/modules/quast/main.nf index 43caca3d..e88051b5 100644 --- a/modules/quast/main.nf +++ b/modules/quast/main.nf @@ -19,8 +19,8 @@ process QUAST { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ?: 'quast' + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: 'quast' def features = use_gff ? "--features $gff" : '' def reference = use_fasta ? "-r $fasta" : '' """ diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index 659082fa..4b2ada47 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -23,8 +23,8 @@ process RSEM_CALCULATEEXPRESSION { tuple val(meta), path("${prefix}.transcript.bam"), optional:true, emit: bam_transcript script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 9557fd24..6cae4f72 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -20,8 +20,8 @@ process SALMON_QUANT { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def reference = "--index $index" def input_reads = meta.single_end ? 
"-r $reads" : "-1 ${reads[0]} -2 ${reads[1]}" diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 8eeb64a2..fcfcf61f 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -17,8 +17,8 @@ process SAMTOOLS_MERGE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def file_type = input_files[0].getExtension() def reference = fasta ? "--reference ${fasta}" : "" """ diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index 7e361a06..5bed1dae 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -17,7 +17,6 @@ process SEQKIT_SPLIT2 { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - if(meta.single_end){ """ seqkit \\ diff --git a/modules/seqtk/subseq/main.nf b/modules/seqtk/subseq/main.nf index 1d93b061..abfe4faa 100644 --- a/modules/seqtk/subseq/main.nf +++ b/modules/seqtk/subseq/main.nf @@ -16,8 +16,8 @@ process SEQTK_SUBSEQ { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - def prefix = task.ext.suffix ?: '' + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: '' def ext = "fa" if ("$sequences" ==~ /.+\.fq|.+\.fq.gz|.+\.fastq|.+\.fastq.gz/) { ext = "fq" diff --git a/modules/tbprofiler/profile/main.nf b/modules/tbprofiler/profile/main.nf index 3f6bffc3..87175a39 100644 --- a/modules/tbprofiler/profile/main.nf +++ b/modules/tbprofiler/profile/main.nf @@ -19,8 +19,8 @@ process TBPROFILER_PROFILE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def input_reads = meta.single_end ? 
"--read1 $reads" : "--read1 ${reads[0]} --read2 ${reads[1]}" """ tb-profiler \\ diff --git a/tests/modules/gffread/nextflow.config b/tests/modules/gffread/nextflow.config index c020f934..0714a6e8 100644 --- a/tests/modules/gffread/nextflow.config +++ b/tests/modules/gffread/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GFFREAD { - ext.prefix = { "${meta.id}.out" } + ext.prefix = { "${gff.baseName}.out" } } } diff --git a/tests/modules/seqtk/subseq/nextflow.config b/tests/modules/seqtk/subseq/nextflow.config index 24f16bad..8a8b9b45 100644 --- a/tests/modules/seqtk/subseq/nextflow.config +++ b/tests/modules/seqtk/subseq/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SEQTK_SUBSEQ { - ext.prefix = { "${meta.id}.filtered" } + ext.prefix = { ".filtered" } } } From 7389963d5cb18f81c10dff128c510e518ee4f0f6 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Tue, 7 Dec 2021 15:22:24 +0100 Subject: [PATCH 290/314] Add memory stuff to all gatk4 modules (#1122) * Add memory stuff to all gatj4 modules * Add removed input line back in * revert script section --- modules/gatk4/applybqsr/main.nf | 3 ++- modules/gatk4/baserecalibrator/main.nf | 4 ++-- modules/gatk4/bedtointervallist/main.nf | 8 +++++++- modules/gatk4/calculatecontamination/main.nf | 8 +++++++- modules/gatk4/createsequencedictionary/main.nf | 2 +- modules/gatk4/createsomaticpanelofnormals/main.nf | 8 +++++++- modules/gatk4/estimatelibrarycomplexity/main.nf | 2 +- modules/gatk4/fastqtosam/main.nf | 8 +++++++- modules/gatk4/filtermutectcalls/main.nf | 8 +++++++- modules/gatk4/genomicsdbimport/main.nf | 8 +++++++- modules/gatk4/genotypegvcfs/main.nf | 8 +++++++- modules/gatk4/getpileupsummaries/main.nf | 8 +++++++- modules/gatk4/indexfeaturefile/main.nf | 8 +++++++- modules/gatk4/intervallisttools/main.nf | 8 +++++++- modules/gatk4/learnreadorientationmodel/main.nf | 8 +++++++- modules/gatk4/markduplicates/main.nf | 6 +++--- modules/gatk4/mergebamalignment/main.nf | 8 +++++++- modules/gatk4/mergevcfs/main.nf | 8 +++++++- modules/gatk4/mutect2/main.nf | 8 +++++++- modules/gatk4/revertsam/main.nf | 8 +++++++- modules/gatk4/samtofastq/main.nf | 8 +++++++- modules/gatk4/splitncigarreads/main.nf | 8 +++++++- modules/gatk4/variantfiltration/main.nf | 2 +- 23 files changed, 129 insertions(+), 26 deletions(-) diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index bd428d6c..3cc69ddf 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -22,13 +22,14 @@ process GATK4_APPLYBQSR { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" + def avail_mem = 3 if (!task.memory) { log.info '[GATK ApplyBQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
} else { avail_mem = task.memory.giga } """ - gatk ApplyBQSR \\ + gatk --java-options "-Xmx${avail_mem}g" ApplyBQSR \\ -R $fasta \\ -I $input \\ --bqsr-recal-file $bqsr_table \\ diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 9b0bf286..17b37943 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -25,14 +25,14 @@ process GATK4_BASERECALIBRATOR { def prefix = task.ext.prefix ?: "${meta.id}" def intervalsCommand = intervalsBed ? "-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') - + def avail_mem = 3 if (!task.memory) { log.info '[GATK BaseRecalibrator] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } """ - gatk BaseRecalibrator \ + gatk --java-options "-Xmx${avail_mem}g" BaseRecalibrator \ -R $fasta \ -I $input \ $sitesCommand \ diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index c4538034..2f6266b9 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -18,8 +18,14 @@ process GATK4_BEDTOINTERVALLIST { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK BedToIntervalList] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk BedToIntervalList \\ + gatk --java-options "-Xmx${avail_mem}g" BedToIntervalList \\ -I $bed \\ -SD $sequence_dict \\ -O ${prefix}.interval_list \\ diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index 7c112c3c..8840356a 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -21,8 +21,14 @@ process GATK4_CALCULATECONTAMINATION { def prefix = task.ext.prefix ?: "${meta.id}" def matched_command = matched ? " -matched ${matched} " : '' def segment_command = segmentout ? " -segments ${prefix}.segmentation.table" : '' + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK CalculateContamination] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk CalculateContamination \\ + gatk --java-options "-Xmx${avail_mem}g" CalculateContamination \\ -I $pileup \\ $matched_command \\ -O ${prefix}.contamination.table \\ diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 8d001856..e8f32106 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -18,7 +18,7 @@ process GATK4_CREATESEQUENCEDICTIONARY { def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { - log.info '[GATK] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' + log.info '[GATK CreateSequenceDictionary] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' 
} else { avail_mem = task.memory.giga } diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 2860e82e..ff345f75 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -21,8 +21,14 @@ process GATK4_CREATESOMATICPANELOFNORMALS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK CreateSomaticPanelOfNormals] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ CreateSomaticPanelOfNormals \\ -R $fasta \\ -V gendb://$genomicsdb \\ diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index f636dc46..c17dba09 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -29,7 +29,7 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { avail_mem = task.memory.giga } """ - gatk EstimateLibraryComplexity \ + gatk --java-options "-Xmx${avail_mem}g" EstimateLibraryComplexity \ ${crams} \ -O ${prefix}.metrics \ --REFERENCE_SEQUENCE ${fasta} \ diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index 915eb996..a55ba709 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -18,8 +18,14 @@ process GATK4_FASTQTOSAM { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def read_files = meta.single_end ? "-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK FastqToSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk FastqToSam \\ + gatk --java-options "-Xmx${avail_mem}g" FastqToSam \\ $read_files \\ -O ${prefix}.bam \\ -SM $prefix \\ diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 02fa804f..6a1d9b3a 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -37,8 +37,14 @@ process GATK4_FILTERMUTECTCALLS { if (contaminationfile) { contamination_options = '--contamination-table ' + contaminationfile.join(' --contamination-table ') } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK FilterMutectCalls] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk FilterMutectCalls \\ + gatk --java-options "-Xmx${avail_mem}g" FilterMutectCalls \\ -R $fasta \\ -V $vcf \\ $orientationbias_options \\ diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index e794aa5a..2751173b 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -42,8 +42,14 @@ process GATK4_GENOMICSDBIMPORT { updated_db = wspace.toString() } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GenomicsDBImport] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk GenomicsDBImport \\ + gatk --java-options "-Xmx${avail_mem}g" GenomicsDBImport \\ $inputs_command \\ $dir_command \\ $intervals_command \\ diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf index f0b35447..1a772860 100644 --- a/modules/gatk4/genotypegvcfs/main.nf +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -26,8 +26,14 @@ process GATK4_GENOTYPEGVCFS { def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GenotypeGVCFs] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ GenotypeGVCFs \\ $args \\ $interval_options \\ diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 99be601f..361974e8 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -24,8 +24,14 @@ process GATK4_GETPILEUPSUMMARIES { sitesCommand = sites ? " -L ${sites} " : " -L ${variants} " + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GetPileupSummaries] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk GetPileupSummaries \\ + gatk --java-options "-Xmx${avail_mem}g" GetPileupSummaries \\ -I $bam \\ -V $variants \\ $sitesCommand \\ diff --git a/modules/gatk4/indexfeaturefile/main.nf b/modules/gatk4/indexfeaturefile/main.nf index d33e030c..cc6c663e 100644 --- a/modules/gatk4/indexfeaturefile/main.nf +++ b/modules/gatk4/indexfeaturefile/main.nf @@ -16,8 +16,14 @@ process GATK4_INDEXFEATUREFILE { script: def args = task.ext.args ?: '' + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK IndexFeatureFile] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ IndexFeatureFile \\ $args \\ -I $feature_file diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 7e1a47f7..b813d844 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -17,11 +17,17 @@ process GATK4_INTERVALLISTTOOLS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK IntervalListTools] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ mkdir ${prefix}_split - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ IntervalListTools \\ -I ${interval_list} \\ -O ${prefix}_split \\ diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index ac021afa..0c2f09d2 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -19,8 +19,14 @@ process GATK4_LEARNREADORIENTATIONMODEL { def prefix = task.ext.prefix ?: "${meta.id}" def inputs_list = [] f1r2.each() { a -> inputs_list.add(" -I " + a) } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK LearnReadOrientationModel] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ LearnReadOrientationModel \\ ${inputs_list.join(' ')} \\ -O ${prefix}.tar.gz \\ diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index a109facc..8bdb2c0a 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -20,14 +20,14 @@ process GATK4_MARKDUPLICATES { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") - def avail_mem = 3 + def avail_mem = 3 if (!task.memory) { - log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + log.info '[GATK MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } """ - gatk MarkDuplicates \\ + gatk --java-options "-Xmx${avail_mem}g" MarkDuplicates \\ $bam_list \\ --METRICS_FILE ${prefix}.metrics \\ --TMP_DIR . \\ diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 5e552cb2..a0f54976 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -20,8 +20,14 @@ process GATK4_MERGEBAMALIGNMENT { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK MergeBamAlignment] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk MergeBamAlignment \\ + gatk --java-options "-Xmx${avail_mem}g" MergeBamAlignment \\ ALIGNED=$aligned \\ UNMAPPED=$unmapped \\ R=$fasta \\ diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index cd1840c3..1fcce485 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -26,8 +26,14 @@ process GATK4_MERGEVCFS { input += " I=${vcf}" } def ref = use_ref_dict ? "D=${ref_dict}" : "" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK MergeVcfs] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk MergeVcfs \\ + gatk --java-options "-Xmx${avail_mem}g" MergeVcfs \\ $input \\ O=${prefix}.vcf.gz \\ $ref \\ diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 2cf940de..414c7705 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -53,8 +53,14 @@ process GATK4_MUTECT2 { normals_command = '-normal ' + which_norm.join( ' -normal ') } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK Mutect2] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk Mutect2 \\ + gatk --java-options "-Xmx${avail_mem}g" Mutect2 \\ -R ${fasta} \\ ${inputs_command} \\ ${normals_command} \\ diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 638b7705..0713d7ca 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -17,8 +17,14 @@ process GATK4_REVERTSAM { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK RevertSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk RevertSam \\ + gatk --java-options "-Xmx${avail_mem}g" RevertSam \\ I=$bam \\ O=${prefix}.reverted.bam \\ $args diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index a909f540..0afb7ef3 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -18,8 +18,14 @@ process GATK4_SAMTOFASTQ { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def output = meta.single_end ? "FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK SamToFastq] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk SamToFastq \\ + gatk --java-options "-Xmx${avail_mem}g" SamToFastq \\ I=$bam \\ $output \\ $args diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 65b82a35..6daed954 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -20,8 +20,14 @@ process GATK4_SPLITNCIGARREADS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK SplitNCigarReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk SplitNCigarReads \\ + gatk --java-options "-Xmx${avail_mem}g" SplitNCigarReads \\ -R $fasta \\ -I $bam \\ -O ${prefix}.bam \\ diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index 00dc2588..efe245cc 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -23,7 +23,7 @@ process GATK4_VARIANTFILTRATION { def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { - log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + log.info '[GATK VariantFiltration] Available memory not known - defaulting to 3GB. 
Specify process memory requirements to change this.' } else { avail_mem = task.memory.toGiga() } From 31d4099f388eb1057d8befc5944305d0de3df951 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Tue, 7 Dec 2021 17:12:35 +0100 Subject: [PATCH 291/314] Add subworkflows for ensemblvep and snpeff (#1124) * greatly simplify syntax * feat: add subworkflows to annotate (+ bgzip/tabix index) with ensemblvep and snpeff * feat: get versions from all tools * add commented infor for new annotation modules --- .../nf-core/annotation_ensemblvep/main.nf | 26 ++++++++++++++++ .../nf-core/annotation_ensemblvep/meta.yml | 29 ++++++++++++++++++ .../nf-core/annotation_snpeff/main.nf | 23 ++++++++++++++ .../nf-core/annotation_snpeff/meta.yml | 29 ++++++++++++++++++ tests/config/pytest_modules.yml | 30 ++++++++++++------- .../nf-core/annotation_ensemblvep/main.nf | 14 +++++++++ .../annotation_ensemblvep/nextflow.config | 14 +++++++++ .../nf-core/annotation_ensemblvep/test.yml | 7 +++++ .../nf-core/annotation_snpeff/main.nf | 14 +++++++++ .../nf-core/annotation_snpeff/nextflow.config | 14 +++++++++ .../nf-core/annotation_snpeff/test.yml | 7 +++++ 11 files changed, 196 insertions(+), 11 deletions(-) create mode 100644 subworkflows/nf-core/annotation_ensemblvep/main.nf create mode 100644 subworkflows/nf-core/annotation_ensemblvep/meta.yml create mode 100644 subworkflows/nf-core/annotation_snpeff/main.nf create mode 100644 subworkflows/nf-core/annotation_snpeff/meta.yml create mode 100644 tests/subworkflows/nf-core/annotation_ensemblvep/main.nf create mode 100644 tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config create mode 100644 tests/subworkflows/nf-core/annotation_ensemblvep/test.yml create mode 100644 tests/subworkflows/nf-core/annotation_snpeff/main.nf create mode 100644 tests/subworkflows/nf-core/annotation_snpeff/nextflow.config create mode 100644 tests/subworkflows/nf-core/annotation_snpeff/test.yml diff --git a/subworkflows/nf-core/annotation_ensemblvep/main.nf b/subworkflows/nf-core/annotation_ensemblvep/main.nf new file mode 100644 index 00000000..3f3ecc6e --- /dev/null +++ b/subworkflows/nf-core/annotation_ensemblvep/main.nf @@ -0,0 +1,26 @@ +// +// Run VEP to annotate VCF files +// + +include { ENSEMBLVEP } from '../../../modules/ensemblvep/main' +include { TABIX_BGZIPTABIX as ANNOTATION_BGZIPTABIX } from '../../../modules/tabix/bgziptabix/main' + +workflow ANNOTATION_ENSEMBLVEP { + take: + vcf // channel: [ val(meta), vcf ] + vep_genome // value: which genome + vep_species // value: which species + vep_cache_version // value: which cache version + vep_cache // path: path_to_vep_cache (optionnal) + + main: + ENSEMBLVEP(vcf, vep_genome, vep_species, vep_cache_version, vep_cache) + ANNOTATION_BGZIPTABIX(ENSEMBLVEP.out.vcf) + + ch_versions = ENSEMBLVEP.out.versions.first().mix(ANNOTATION_BGZIPTABIX.out.versions.first()) + + emit: + vcf_tbi = ANNOTATION_BGZIPTABIX.out.gz_tbi // channel: [ val(meta), vcf.gz, vcf.gz.tbi ] + reports = ENSEMBLVEP.out.report // path: *.html + versions = ch_versions // path: versions.yml +} diff --git a/subworkflows/nf-core/annotation_ensemblvep/meta.yml b/subworkflows/nf-core/annotation_ensemblvep/meta.yml new file mode 100644 index 00000000..e7d92ce9 --- /dev/null +++ b/subworkflows/nf-core/annotation_ensemblvep/meta.yml @@ -0,0 +1,29 @@ +name: annotation_ensemblvep +description: | + Perform annotation with ensemblvep and bgzip + tabix index the resulting VCF file +keywords: + - ensemblvep +modules: + - ensemblvep + - tabix/bgziptabix +input: + - meta: + 
type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input: + type: vcf + description: list containing one vcf file + pattern: "[ *.{vcf,vcf.gz} ]" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - vcf_tbi: + type: file + description: Compressed vcf file + tabix index + pattern: "[ *{.vcf.gz,vcf.gz.tbi} ]" +authors: + - '@maxulysse' diff --git a/subworkflows/nf-core/annotation_snpeff/main.nf b/subworkflows/nf-core/annotation_snpeff/main.nf new file mode 100644 index 00000000..add5f9c8 --- /dev/null +++ b/subworkflows/nf-core/annotation_snpeff/main.nf @@ -0,0 +1,23 @@ +// +// Run SNPEFF to annotate VCF files +// + +include { SNPEFF } from '../../../modules/snpeff/main' +include { TABIX_BGZIPTABIX as ANNOTATION_BGZIPTABIX } from '../../../modules/tabix/bgziptabix/main' + +workflow ANNOTATION_SNPEFF { + take: + vcf // channel: [ val(meta), vcf ] + snpeff_db // value: version of db to use + snpeff_cache // path: path_to_snpeff_cache (optional) + + main: + SNPEFF(vcf, snpeff_db, snpeff_cache) + ANNOTATION_BGZIPTABIX(SNPEFF.out.vcf) + ch_versions = SNPEFF.out.versions.first().mix(ANNOTATION_BGZIPTABIX.out.versions.first()) + + emit: + vcf_tbi = ANNOTATION_BGZIPTABIX.out.gz_tbi // channel: [ val(meta), vcf.gz, vcf.gz.tbi ] + reports = SNPEFF.out.report // path: *.html + versions = ch_versions // path: versions.yml +} diff --git a/subworkflows/nf-core/annotation_snpeff/meta.yml b/subworkflows/nf-core/annotation_snpeff/meta.yml new file mode 100644 index 00000000..164a0ee2 --- /dev/null +++ b/subworkflows/nf-core/annotation_snpeff/meta.yml @@ -0,0 +1,29 @@ +name: annotation_snpeff +description: | + Perform annotation with snpeff and bgzip + tabix index the resulting VCF file +keywords: + - snpeff +modules: + - snpeff + - tabix/bgziptabix +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g.
[ id:'test' ] + - input: + type: vcf + description: list containing one vcf file + pattern: "[ *.{vcf,vcf.gz} ]" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - vcf_tbi: + type: file + description: Compressed vcf file + tabix index + pattern: "[ *{.vcf.gz,vcf.gz.tbi} ]" +authors: + - '@maxulysse' diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index bbe89840..e35f8908 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1451,6 +1451,19 @@ yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** +# subworkflows/align_bowtie2: +# - subworkflows/nf-core/align_bowtie2/** +# - tests/subworkflows/nf-core/align_bowtie2/** +# - *subworkflows_bam_sort_samtools + +# subworkflows/annotation_ensemblvep: &subworkflows_annotation_ensemblvep +# - subworkflows/nf-core/annotation_ensemblvep/** +# - tests/subworkflows/nf-core/annotation_ensemblvep/** + +# subworkflows/annotation_snpeff: &subworkflows_annotation_snpeff +# - subworkflows/nf-core/annotation_snpeff/** +# - tests/subworkflows/nf-core/annotation_snpeff/** + # subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools # - subworkflows/nf-core/bam_stats_samtools/** # - tests/subworkflows/nf-core/bam_stats_samtools/** @@ -1462,17 +1475,6 @@ yara/mapper: # - *samtools_index # - *subworkflows_bam_stats_samtools -# subworkflows/align_bowtie2: -# - subworkflows/nf-core/align_bowtie2/** -# - tests/subworkflows/nf-core/align_bowtie2/** -# - *subworkflows_bam_sort_samtools - -# subworkflows/sra_fastq: -# - subworkflows/nf-core/sra_fastq/** -# - tests/subworkflows/nf-core/sra_fastq/** -# - *sratools_fasterqdump -# - *sratools_prefetch - # subworkflows/gatk_create_som_pon: # - subworkflows/nf-core/gatk_create_som_pon/** # - tests/subworkflows/nf-core/gatk_create_som_pon/** @@ -1495,3 +1497,9 @@ yara/mapper: # - *gatk4_getpileupsummaries # - *gatk4_calculatecontamination # - *gatk4_filtermutectcalls + +# subworkflows/sra_fastq: +# - subworkflows/nf-core/sra_fastq/** +# - tests/subworkflows/nf-core/sra_fastq/** +# - *sratools_fasterqdump +# - *sratools_prefetch diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf b/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf new file mode 100644 index 00000000..0f00c62e --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ANNOTATION_ENSEMBLVEP } from '../../../../subworkflows/nf-core/annotation_ensemblvep/main' + +workflow annotation_ensemblvep { + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + + ANNOTATION_ENSEMBLVEP ( input, "WBcel235", "caenorhabditis_elegans", "104", [] ) +} diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config b/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config new file mode 100644 index 00000000..4e8d2990 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config @@ -0,0 +1,14 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ENSEMBLVEP { + container = 'nfcore/vep:104.3.WBcel235' + publishDir = [ enabled: false ] + } + + withName: ANNOTATION_BGZIPTABIX { + ext.prefix = { "${meta.id}_VEP.ann.vcf" } + } + +} diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml 
b/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml new file mode 100644 index 00000000..706d9d05 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml @@ -0,0 +1,7 @@ +- name: ensemblvep annotation_ensemblvep + command: nextflow run ./tests/subworkflows/nf-core/annotation_ensemblvep -entry annotation_ensemblvep -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config + tags: + - annotation_ensemblvep + files: + - path: output/annotation/test_VEP.ann.vcf.gz + - path: output/annotation/test_VEP.ann.vcf.gz.tbi diff --git a/tests/subworkflows/nf-core/annotation_snpeff/main.nf b/tests/subworkflows/nf-core/annotation_snpeff/main.nf new file mode 100644 index 00000000..c80197ee --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ANNOTATION_SNPEFF } from '../../../../subworkflows/nf-core/annotation_snpeff/main' + +workflow annotation_snpeff { + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + + ANNOTATION_SNPEFF ( input, "WBcel235.99", [] ) +} diff --git a/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config b/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config new file mode 100644 index 00000000..be76cb4a --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config @@ -0,0 +1,14 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SNPEFF { + container = 'nfcore/snpeff:5.0.WBcel235' + publishDir = [ enabled: false ] + } + + withName: ANNOTATION_BGZIPTABIX { + ext.prefix = { "${meta.id}_snpEff.ann.vcf" } + } + +} diff --git a/tests/subworkflows/nf-core/annotation_snpeff/test.yml b/tests/subworkflows/nf-core/annotation_snpeff/test.yml new file mode 100644 index 00000000..943b24e9 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/test.yml @@ -0,0 +1,7 @@ +- name: snpeff annotation_snpeff + command: nextflow run ./tests/subworkflows/nf-core/annotation_snpeff -entry annotation_snpeff -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/annotation_snpeff/nextflow.config + tags: + - annotation_snpeff + files: + - path: output/annotation/test_snpEff.ann.vcf.gz + - path: output/annotation/test_snpEff.ann.vcf.gz.tbi From 1765225042d40cf01eb9469ea95bf6d0f6810937 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 8 Dec 2021 15:31:27 +0100 Subject: [PATCH 292/314] Add new boolean sort_bam as input to be able to chose between sort and view in bwamem and bwamem2mem (#1125) * feat: view is now in args2 so we can use sort * forgot one split_cpus * feat: update with new logic * fix: add more info * fix: remove split_cpus logic --- modules/bwa/mem/main.nf | 4 ++- modules/bwa/mem/meta.yml | 4 +++ modules/bwamem2/mem/main.nf | 4 ++- modules/bwamem2/mem/meta.yml | 10 ++++-- tests/modules/bwa/mem/main.nf | 37 ++++++++++++++++++++-- tests/modules/bwa/mem/nextflow.config | 4 +++ tests/modules/bwa/mem/test.yml | 36 +++++++++++++++++++++ tests/modules/bwamem2/mem/main.nf | 38 +++++++++++++++++++++-- tests/modules/bwamem2/mem/nextflow.config | 4 +++ tests/modules/bwamem2/mem/test.yml | 36 +++++++++++++++++++++ 10 files changed, 168 insertions(+), 9 deletions(-) diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index 801293a8..9695bd2d 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -10,6 +10,7 @@ process BWA_MEM { input: tuple val(meta), path(reads) path index + val sort_bam output: tuple val(meta), path("*.bam"), emit: bam @@ -20,6 +21,7 @@ process BWA_MEM { def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" + def samtools_command = sort_bam ? 'sort' : 'view' """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` @@ -29,7 +31,7 @@ process BWA_MEM { -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools $samtools_command $args2 --threads $task.cpus -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/bwa/mem/meta.yml b/modules/bwa/mem/meta.yml index 61eaddef..c7c28f19 100644 --- a/modules/bwa/mem/meta.yml +++ b/modules/bwa/mem/meta.yml @@ -32,6 +32,10 @@ input: type: file description: BWA genome index files pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}" + - sort_bam: + type: boolean + description: use samtools sort (true) or samtools view (false) + pattern: "true or false" output: - bam: type: file diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 81b4b8ab..6d4d8028 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -10,6 +10,7 @@ process BWAMEM2_MEM { input: tuple val(meta), path(reads) path index + val sort_bam output: tuple val(meta), path("*.bam"), emit: bam @@ -20,6 +21,7 @@ process BWAMEM2_MEM { def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" + def samtools_command = sort_bam ? 'sort' : 'view' """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` @@ -30,7 +32,7 @@ process BWAMEM2_MEM { -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools $samtools_command $args2 -@ $task.cpus -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/bwamem2/mem/meta.yml b/modules/bwamem2/mem/meta.yml index 58a35e08..71e83759 100644 --- a/modules/bwamem2/mem/meta.yml +++ b/modules/bwamem2/mem/meta.yml @@ -11,9 +11,9 @@ keywords: tools: - bwa: description: | - BWA is a software package for mapping DNA sequences against + BWA-mem2 is a software package for mapping DNA sequences against a large reference genome, such as the human genome. 
- homepage: http://bio-bwa.sourceforge.net/ + homepage: https://github.com/bwa-mem2/bwa-mem2 documentation: http://www.htslib.org/doc/samtools.html arxiv: arXiv:1303.3997 licence: ['MIT'] @@ -31,7 +31,11 @@ input: - index: type: file description: BWA genome index files - pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}" + pattern: "Directory containing BWA index *.{0132,amb,ann,bwt.2bit.64,pac}" + - sort_bam: + type: boolean + description: use samtools sort (true) or samtools view (false) + pattern: "true or false" output: - bam: type: file diff --git a/tests/modules/bwa/mem/main.nf b/tests/modules/bwa/mem/main.nf index 117cbb4d..c9c57197 100644 --- a/tests/modules/bwa/mem/main.nf +++ b/tests/modules/bwa/mem/main.nf @@ -18,7 +18,23 @@ workflow test_bwa_mem_single_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) - BWA_MEM ( input, BWA_INDEX.out.index ) + BWA_MEM ( input, BWA_INDEX.out.index, false ) +} + +// +// Test with single-end data and sort +// +workflow test_bwa_mem_single_end_sort { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_MEM ( input, BWA_INDEX.out.index, true ) } // @@ -35,5 +51,22 @@ workflow test_bwa_mem_paired_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) - BWA_MEM ( input, BWA_INDEX.out.index ) + BWA_MEM ( input, BWA_INDEX.out.index, false ) +} + +// +// Test with paired-end data and sort +// +workflow test_bwa_mem_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_MEM ( input, BWA_INDEX.out.index, true ) } diff --git a/tests/modules/bwa/mem/nextflow.config b/tests/modules/bwa/mem/nextflow.config index 8730f1c4..d15f6939 100644 --- a/tests/modules/bwa/mem/nextflow.config +++ b/tests/modules/bwa/mem/nextflow.config @@ -2,4 +2,8 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: BWA_MEM { + ext.args2 = { sort_bam ? 
"" : "-bh" } + } + } diff --git a/tests/modules/bwa/mem/test.yml b/tests/modules/bwa/mem/test.yml index 93535043..8fe2ee6b 100644 --- a/tests/modules/bwa/mem/test.yml +++ b/tests/modules/bwa/mem/test.yml @@ -16,6 +16,24 @@ - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 +- name: bwa mem single-end sort + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config + tags: + - bwa + - bwa/mem + files: + - path: ./output/bwa/test.bam + - path: ./output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: ./output/bwa/bwa/genome.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwa/bwa/genome.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: ./output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwa/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 + - name: bwa mem paired-end command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config tags: @@ -33,3 +51,21 @@ md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 + +- name: bwa mem paired-end sort + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config + tags: + - bwa + - bwa/mem + files: + - path: ./output/bwa/test.bam + - path: ./output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: ./output/bwa/bwa/genome.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwa/bwa/genome.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: ./output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwa/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git a/tests/modules/bwamem2/mem/main.nf b/tests/modules/bwamem2/mem/main.nf index 2ab557e6..b4293dbe 100644 --- a/tests/modules/bwamem2/mem/main.nf +++ b/tests/modules/bwamem2/mem/main.nf @@ -18,9 +18,26 @@ workflow test_bwamem2_mem_single_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) - BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, false ) } +// +// Test with single-end data and sort +// +workflow test_bwamem2_mem_single_end_sort { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWAMEM2_INDEX ( fasta ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, true ) +} + + // // Test with paired-end data // @@ -35,5 +52,22 @@ workflow test_bwamem2_mem_paired_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) - BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, false ) +} + +// +// Test with paired-end data and sort +// +workflow test_bwamem2_mem_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = 
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWAMEM2_INDEX ( fasta ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, true ) } diff --git a/tests/modules/bwamem2/mem/nextflow.config b/tests/modules/bwamem2/mem/nextflow.config index 8730f1c4..b5181865 100644 --- a/tests/modules/bwamem2/mem/nextflow.config +++ b/tests/modules/bwamem2/mem/nextflow.config @@ -2,4 +2,8 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: BWAMEM2_MEM { + ext.args2 = { sort_bam ? "" : "-bh" } + } + } diff --git a/tests/modules/bwamem2/mem/test.yml b/tests/modules/bwamem2/mem/test.yml index c1724bc0..bf445ebe 100644 --- a/tests/modules/bwamem2/mem/test.yml +++ b/tests/modules/bwamem2/mem/test.yml @@ -16,6 +16,24 @@ - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 +- name: bwamem2 mem single-end sort + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config + tags: + - bwamem2 + - bwamem2/mem + files: + - path: ./output/bwamem2/test.bam + - path: ./output/bwamem2/bwamem2/genome.fasta.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwamem2/bwamem2/genome.fasta.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 + md5sum: b02870de80106104abcb03cd9463e7d8 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 + md5sum: d097a1b82dee375d41a1ea69895a9216 + - path: ./output/bwamem2/bwamem2/genome.fasta.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - name: bwamem2 mem paired-end command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config tags: @@ -33,3 +51,21 @@ md5sum: d097a1b82dee375d41a1ea69895a9216 - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 + +- name: bwamem2 mem paired-end sort + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config + tags: + - bwamem2 + - bwamem2/mem + files: + - path: ./output/bwamem2/test.bam + - path: ./output/bwamem2/bwamem2/genome.fasta.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwamem2/bwamem2/genome.fasta.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 + md5sum: b02870de80106104abcb03cd9463e7d8 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 + md5sum: d097a1b82dee375d41a1ea69895a9216 + - path: ./output/bwamem2/bwamem2/genome.fasta.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 From ca3ae9ff4f39eb3553ad6b3d2e1fa511131685de Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 8 Dec 2021 23:19:37 +0100 Subject: [PATCH 293/314] feat: update dragmap to follow new bwa/mem + bwamem2/mem logic (#1146) --- modules/dragmap/align/main.nf | 6 ++-- tests/modules/dragmap/align/main.nf | 33 +++++++++++++++++++-- tests/modules/dragmap/align/nextflow.config | 8 +++-- tests/modules/dragmap/align/test.yml | 18 +++++++++++ 4 files changed, 58 insertions(+), 7 deletions(-) diff --git a/modules/dragmap/align/main.nf b/modules/dragmap/align/main.nf index f6d6877e..8a6f082a 100644 --- a/modules/dragmap/align/main.nf +++ b/modules/dragmap/align/main.nf @@ -10,6 +10,7 @@ process DRAGMAP_ALIGN { input: tuple val(meta), path(reads) path hashmap + val sort_bam output: tuple val(meta), path("*.bam"), emit: bam @@ -20,6 +21,7 @@ process DRAGMAP_ALIGN { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def samtools_command = sort_bam ? 'sort' : 'view' if (meta.single_end) { """ dragen-os \\ @@ -28,7 +30,7 @@ process DRAGMAP_ALIGN { --num-threads $task.cpus \\ $args \\ 2> ${prefix}.dragmap.log \\ - | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - + | samtools $samtools_command -@ $task.cpus $args2 -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -46,7 +48,7 @@ process DRAGMAP_ALIGN { --num-threads $task.cpus \\ $args \\ 2> ${prefix}.dragmap.log \\ - | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - + | samtools $samtools_command -@ $task.cpus $args2 -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/modules/dragmap/align/main.nf b/tests/modules/dragmap/align/main.nf index 92e8c265..4376602c 100644 --- a/tests/modules/dragmap/align/main.nf +++ b/tests/modules/dragmap/align/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 include { DRAGMAP_HASHTABLE } from '../../../../modules/dragmap/hashtable/main.nf' -include { DRAGMAP_ALIGN } from '../../../../modules/dragmap/align/main.nf' +include { DRAGMAP_ALIGN } from '../../../../modules/dragmap/align/main.nf' workflow test_dragmap_align_single_end { input = [ @@ -15,7 +15,20 @@ workflow test_dragmap_align_single_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) DRAGMAP_HASHTABLE ( fasta ) - DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, false ) +} + +workflow test_dragmap_align_single_end_sort { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, true ) } workflow test_dragmap_align_paired_end { @@ -29,5 +42,19 @@ workflow test_dragmap_align_paired_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) DRAGMAP_HASHTABLE ( fasta ) - DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, false ) +} + +workflow test_dragmap_align_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + 
DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, true ) } diff --git a/tests/modules/dragmap/align/nextflow.config b/tests/modules/dragmap/align/nextflow.config index 50f50a7a..b968c357 100644 --- a/tests/modules/dragmap/align/nextflow.config +++ b/tests/modules/dragmap/align/nextflow.config @@ -1,5 +1,9 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - -} \ No newline at end of file + + withName: DRAGMAP_ALIGN { + ext.args2 = { sort_bam ? "" : "-bh" } + } + +} diff --git a/tests/modules/dragmap/align/test.yml b/tests/modules/dragmap/align/test.yml index 75c5ea96..b0196e55 100644 --- a/tests/modules/dragmap/align/test.yml +++ b/tests/modules/dragmap/align/test.yml @@ -7,6 +7,15 @@ - path: output/dragmap/test.bam - path: output/dragmap/test.dragmap.log +- name: dragmap align single-end_sort + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log + - name: dragmap align paired-end command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config tags: @@ -15,3 +24,12 @@ files: - path: output/dragmap/test.bam - path: output/dragmap/test.dragmap.log + +- name: dragmap align paired-end_sort + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log From a9dd46f010e3974f00616606e209db8d81587c64 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Wed, 8 Dec 2021 23:43:36 +0100 Subject: [PATCH 294/314] add gatk4/gatherbqsr (#1130) * nf-core modules create * add module files * indent * remove templte code * manually revert pytest changes from tools * manually revert pytest changes from tools * add include statement back in Co-authored-by: Maxime U. Garcia --- modules/gatk4/gatherbqsrreports/main.nf | 41 ++++++++++++++++++ modules/gatk4/gatherbqsrreports/meta.yml | 43 +++++++++++++++++++ tests/config/pytest_modules.yml | 28 ++++++------ tests/modules/gatk4/gatherbqsrreports/main.nf | 27 ++++++++++++ .../gatk4/gatherbqsrreports/nextflow.config | 5 +++ .../modules/gatk4/gatherbqsrreports/test.yml | 21 +++++++++ 6 files changed, 153 insertions(+), 12 deletions(-) create mode 100644 modules/gatk4/gatherbqsrreports/main.nf create mode 100644 modules/gatk4/gatherbqsrreports/meta.yml create mode 100644 tests/modules/gatk4/gatherbqsrreports/main.nf create mode 100644 tests/modules/gatk4/gatherbqsrreports/nextflow.config create mode 100644 tests/modules/gatk4/gatherbqsrreports/test.yml diff --git a/modules/gatk4/gatherbqsrreports/main.nf b/modules/gatk4/gatherbqsrreports/main.nf new file mode 100644 index 00000000..1567f9aa --- /dev/null +++ b/modules/gatk4/gatherbqsrreports/main.nf @@ -0,0 +1,41 @@ +process GATK4_GATHERBQSRREPORTS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+        'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_1':
+        'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_1' }"
+
+    input:
+    tuple val(meta), path(recal_table)
+
+    output:
+    tuple val(meta), path("*.table"), emit: table
+    path "versions.yml"             , emit: versions
+
+    script:
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    def input = recal_table.collect{"-I ${it}"}.join(' ')
+
+    def avail_mem = 3
+    if (!task.memory) {
+        log.info '[GATK GatherBQSRReports] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
+    } else {
+        avail_mem = task.memory.giga
+    }
+    """
+    gatk --java-options "-Xmx${avail_mem}g" \\
+        GatherBQSRReports \
+        ${input} \
+        --tmp-dir . \
+        $args \
+        --output ${prefix}.table
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//')
+    END_VERSIONS
+    """
+}
diff --git a/modules/gatk4/gatherbqsrreports/meta.yml b/modules/gatk4/gatherbqsrreports/meta.yml
new file mode 100644
index 00000000..f71afd69
--- /dev/null
+++ b/modules/gatk4/gatherbqsrreports/meta.yml
@@ -0,0 +1,43 @@
+name: gatk4_gatherbqsrreports
+description: Gathers scattered BQSR recalibration reports into a single file
+keywords:
+  - gatk4
+  - gatk4_gatherbqsrreports
+  - base_recalibration
+tools:
+  - gatk4:
+      description: Genome Analysis Toolkit (GATK4)
+      homepage: https://gatk.broadinstitute.org/hc/en-us
+      documentation: https://gatk.broadinstitute.org/hc/en-us
+      tool_dev_url: https://github.com/broadinstitute/gatk
+      doi: "10.1158/1538-7445.AM2017-3590"
+      licence: ['BSD-3-clause']
+
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - recal_table:
+      type: file
+      description: File(s) containing BQSR table(s)
+      pattern: "*.table"
+
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g.
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - recal_table: + type: file + description: File containing joined BQSR table + pattern: "*.table" + +authors: + - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index e35f8908..6dbfc1fd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -314,26 +314,26 @@ cnvkit/batch: - modules/cnvkit/batch/** - tests/modules/cnvkit/batch/** -cooler/digest: - - modules/cooler/digest/** - - tests/modules/cooler/digest/** - cooler/cload: - modules/cooler/cload/** - tests/modules/cooler/cload/** +cooler/digest: + - modules/cooler/digest/** + - tests/modules/cooler/digest/** + cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** -cooler/zoomify: - - modules/cooler/zoomify/** - - tests/software/cooler/zoomify/** - cooler/merge: - modules/cooler/merge/** - tests/modules/cooler/merge/** +cooler/zoomify: + - modules/cooler/zoomify/** + - tests/software/cooler/zoomify/** + csvtk/concat: - modules/csvtk/concat/** - tests/modules/csvtk/concat/** @@ -538,6 +538,10 @@ gatk4/filtermutectcalls: #&gatk4_filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** +gatk4/gatherbqsrreports: + - modules/gatk4/gatherbqsrreports/** + - tests/modules/gatk4/gatherbqsrreports/** + gatk4/genomicsdbimport: #&gatk4_genomicsdbimport - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** @@ -1035,6 +1039,10 @@ pbccs: - modules/pbccs/** - tests/modules/pbccs/** +peddy: + - modules/peddy/** + - tests/modules/peddy/** + phyloflash: - modules/phyloflash/** - tests/modules/phyloflash/** @@ -1043,10 +1051,6 @@ picard/collecthsmetrics: - modules/picard/collecthsmetrics/** - tests/modules/picard/collecthsmetrics/** -peddy: - - modules/peddy/** - - tests/modules/peddy/** - picard/collectmultiplemetrics: - modules/picard/collectmultiplemetrics/** - tests/modules/picard/collectmultiplemetrics/** diff --git a/tests/modules/gatk4/gatherbqsrreports/main.nf b/tests/modules/gatk4/gatherbqsrreports/main.nf new file mode 100644 index 00000000..2693a06a --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_GATHERBQSRREPORTS } from '../../../../modules/gatk4/gatherbqsrreports/main.nf' + +workflow test_gatk4_gatherbqsrreports { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true) + ] + + GATK4_GATHERBQSRREPORTS ( input ) +} + +workflow test_gatk4_gatherbqsrreports_multiple { + + input = [ + [ id:'test', single_end:false ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_baserecalibrator_table'], checkIfExists: true) + ] + ] + + GATK4_GATHERBQSRREPORTS ( input ) +} diff --git a/tests/modules/gatk4/gatherbqsrreports/nextflow.config b/tests/modules/gatk4/gatherbqsrreports/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/gatk4/gatherbqsrreports/test.yml 
b/tests/modules/gatk4/gatherbqsrreports/test.yml new file mode 100644 index 00000000..76c90120 --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/test.yml @@ -0,0 +1,21 @@ +- name: gatk4 gatherbqsrreports test_gatk4_gatherbqsrreports + command: nextflow run tests/modules/gatk4/gatherbqsrreports -entry test_gatk4_gatherbqsrreports -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/gatherbqsrreports + files: + - path: output/gatk4/test.table + md5sum: 9603b69fdc3b5090de2e0dd78bfcc4bf + - path: output/gatk4/versions.yml + md5sum: 50238fd0f3b6f4efb2b5335b6324f905 + +- name: gatk4 gatherbqsrreports test_gatk4_gatherbqsrreports_multiple + command: nextflow run tests/modules/gatk4/gatherbqsrreports -entry test_gatk4_gatherbqsrreports_multiple -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/gatherbqsrreports + files: + - path: output/gatk4/test.table + md5sum: 0c1257eececf95db8ca378272d0f21f9 + - path: output/gatk4/versions.yml + md5sum: c6ce163062dd3609848fc5bc10660427 From 37c5cb495d40118b13a0ecda648da9512ee5a9fc Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Thu, 9 Dec 2021 11:04:53 +0100 Subject: [PATCH 295/314] feat: add original input as optional output channel (#1147) --- modules/samtools/index/main.nf | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index b033e225..74ae15df 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -11,10 +11,13 @@ process SAMTOOLS_INDEX { tuple val(meta), path(input) output: - tuple val(meta), path("*.bai") , optional:true, emit: bai - tuple val(meta), path("*.crai"), optional:true, emit: crai - tuple val(meta), path("*.csi") , optional:true, emit: csi - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam" , includeInputs:true), path("*.bai") , optional:true, emit: bam_bai + tuple val(meta), path("*.bai") , optional:true, emit: bai + tuple val(meta), path("*.bam" , includeInputs:true), path("*.csi") , optional:true, emit: bam_csi + tuple val(meta), path("*.csi") , optional:true, emit: csi + tuple val(meta), path("*.cram", includeInputs:true), path("*.crai"), optional:true, emit: cram_crai + tuple val(meta), path("*.crai") , optional:true, emit: crai + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' From a68c563e54d5f3720fc57ef6e34ff08c4b3ec398 Mon Sep 17 00:00:00 2001 From: Francesco L <53608000+lescai@users.noreply.github.com> Date: Thu, 9 Dec 2021 11:16:40 +0100 Subject: [PATCH 296/314] Added UMI sub-workflow (#1098) * added code for subworkflow fgbio call umi consensus * ironing out a few typos etc * fixing last things * fixed md5sum - lets see if it changes * removing file accidentally deleted * tidy indents * added bwamem2 alternative * fixed entry for both tests * changed name second test workflow entry * fixed workflow entry names * fixed md5sum for file generated with bwamem2 * added syntax new DSL2 * added new config location in test command line * added new config location in test command line * use of prefix instead of suffix because modules have been changed in this way * explicit alias to bwa mem1 to avoid confusion * removed param that should be an ext optional argument in fgbio groupreadsbyumi * missing colon in config * missing colon in module config too * order list alphabetically Co-authored-by: Maxime U. Garcia * remove params from body Co-authored-by: Maxime U. 
Garcia * improving readability of input structure Co-authored-by: Mahesh Binzer-Panchal * reverting to mandatory input * fixed tests and workflow take values * remove param Co-authored-by: Maxime U. Garcia * simplify tests params Co-authored-by: Maxime U. Garcia * formatting inputs for readability * factoring in changes to bwamem2_mem and bwa_mem sort/view inputs * updating test md5sum for grouped file following code update in bwamem Co-authored-by: Maxime U. Garcia Co-authored-by: Maxime U. Garcia Co-authored-by: Mahesh Binzer-Panchal --- .../fgbio_create_umi_consensus/main.nf | 86 +++++++++++++++++++ .../fgbio_create_umi_consensus/meta.yml | 67 +++++++++++++++ tests/modules/fgbio/groupreadsbyumi/main.nf | 3 +- .../fgbio_create_umi_consensus/main.nf | 33 +++++++ .../nextflow.config | 31 +++++++ .../fgbio_create_umi_consensus/test.yml | 22 +++++ 6 files changed, 240 insertions(+), 2 deletions(-) create mode 100644 subworkflows/nf-core/fgbio_create_umi_consensus/main.nf create mode 100644 subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml create mode 100644 tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf create mode 100644 tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config create mode 100644 tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml diff --git a/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf b/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf new file mode 100644 index 00000000..042d0bbd --- /dev/null +++ b/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf @@ -0,0 +1,86 @@ +// +// Runs FGBIO tools to remove UMI tags from FASTQ reads +// Convert them to unmapped BAM file, map them to the reference genome, +// use the mapped information to group UMIs and generate consensus reads +// + + +include { BWAMEM2_INDEX } from '../../../modules/bwamem2/index/main.nf' +include { BWAMEM2_MEM } from '../../../modules/bwamem2/mem/main' +include { BWA_INDEX as BWAMEM1_INDEX } from '../../../modules/bwa/index/main.nf' +include { BWA_MEM as BWAMEM1_MEM } from '../../../modules/bwa/mem/main' +include { FGBIO_CALLMOLECULARCONSENSUSREADS as CALLUMICONSENSUS } from '../../../modules/fgbio/callmolecularconsensusreads/main.nf' +include { FGBIO_FASTQTOBAM as FASTQTOBAM } from '../../../modules/fgbio/fastqtobam/main' +include { FGBIO_GROUPREADSBYUMI as GROUPREADSBYUMI } from '../../../modules/fgbio/groupreadsbyumi/main' +include { SAMBLASTER } from '../../../modules/samblaster/main' +include { SAMTOOLS_BAM2FQ as BAM2FASTQ } from '../../../modules/samtools/bam2fq/main.nf' + + +workflow CREATE_UMI_CONSENSUS { + take: + reads // channel: [mandatory] [ val(meta), [ reads ] ] + fasta // channel: [mandatory] /path/to/reference/fasta + read_structure // string: [mandatory] "read_structure" + groupreadsbyumi_strategy // string: [mandatory] grouping strategy - default: "Adjacency" + aligner // string: [mandatory] "bwa-mem" or "bwa-mem2" + + main: + ch_versions = Channel.empty() + + // using information in val(read_structure) FASTQ reads are converted into + // a tagged unmapped BAM file (uBAM) + FASTQTOBAM ( reads, read_structure ) + ch_versions = ch_versions.mix(FASTQTOBAM.out.version) + + // in order to map uBAM using BWA MEM, we need to convert uBAM to FASTQ + // but keep the appropriate UMI tags in the FASTQ comment field and produce + // an interleaved FASQT file (hence, split = false) + split = false + BAM2FASTQ ( FASTQTOBAM.out.umibam, split ) + ch_versions = ch_versions.mix(BAM2FASTQ.out.versions) + + // the user can choose here to use 
either bwa-mem (default) or bwa-mem2
+    aligned_bam = Channel.empty()
+
+    if (aligner == "bwa-mem") {
+        // reference is indexed
+        BWAMEM1_INDEX ( fasta )
+        ch_versions = ch_versions.mix(BWAMEM1_INDEX.out.versions)
+
+        // appropriately tagged interleaved FASTQ reads are mapped to the reference
+        BWAMEM1_MEM ( BAM2FASTQ.out.reads, BWAMEM1_INDEX.out.index, false )
+        ch_versions = ch_versions.mix(BWAMEM1_MEM.out.versions)
+        aligned_bam = BWAMEM1_MEM.out.bam
+    } else {
+        // reference is indexed
+        BWAMEM2_INDEX ( fasta )
+        ch_versions = ch_versions.mix(BWAMEM2_INDEX.out.versions)
+
+        // appropriately tagged interleaved FASTQ reads are mapped to the reference
+        BWAMEM2_MEM ( BAM2FASTQ.out.reads, BWAMEM2_INDEX.out.index, false )
+        ch_versions = ch_versions.mix(BWAMEM2_MEM.out.versions)
+        aligned_bam = BWAMEM2_MEM.out.bam
+    }
+
+    // samblaster is used in order to tag mates information in the BAM file
+    // this is used in order to group reads by UMI
+    SAMBLASTER ( aligned_bam )
+    ch_versions = ch_versions.mix(SAMBLASTER.out.versions)
+
+    // appropriately tagged reads are now grouped by UMI information
+    GROUPREADSBYUMI ( SAMBLASTER.out.bam, groupreadsbyumi_strategy )
+    ch_versions = ch_versions.mix(GROUPREADSBYUMI.out.versions)
+
+    // using the above created groups, a consensus across reads in the same group
+    // can be called
+    // this will emit a consensus BAM file
+    CALLUMICONSENSUS ( GROUPREADSBYUMI.out.bam )
+    ch_versions = ch_versions.mix(CALLUMICONSENSUS.out.versions)
+
+    emit:
+    ubam         = FASTQTOBAM.out.umibam    // channel: [ val(meta), [ bam ] ]
+    groupbam     = GROUPREADSBYUMI.out.bam  // channel: [ val(meta), [ bam ] ]
+    consensusbam = CALLUMICONSENSUS.out.bam // channel: [ val(meta), [ bam ] ]
+    versions     = ch_versions              // channel: [ versions.yml ]
+}
+
diff --git a/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml b/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml
new file mode 100644
index 00000000..2cb61206
--- /dev/null
+++ b/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml
@@ -0,0 +1,67 @@
+name: fgbio_create_umi_consensus
+description: |
+  This workflow uses the suite FGBIO to identify and remove UMI tags from FASTQ reads
+  convert them to unmapped BAM file, map them to the reference genome,
+  and finally use the mapped information to group UMIs and generate consensus reads in each group
+keywords:
+  - fgbio
+  - umi
+  - samblaster
+  - samtools
+  - bwa
+modules:
+  - bwa/index
+  - bwa/mem
+  - fgbio/fastqtobam
+  - fgbio/groupreadsbyumi
+  - fgbio/callmolecularconsensusreads
+  - samblaster
+  - samtools/bam2fq
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test' ]
+  - reads:
+      type: list
+      description: list umi-tagged reads
+      pattern: "[ *.{fastq.gz/fq.gz} ]"
+  - fasta:
+      type: file
+      description: The reference fasta file
+      pattern: "*.fasta"
+  - read_structure:
+      type: string
+      description: |
+        A read structure should always be provided for each of the fastq files.
+        If single end, the string will contain only one structure (i.e. "2M11S+T"), if paired-end the string
+        will contain two structures separated by a blank space (i.e. "2M11S+T 2M11S+T").
+        If the read does not contain any UMI, the structure will be +T (i.e. only template of any length).
+        https://github.com/fulcrumgenomics/fgbio/wiki/Read-Structures
+  - groupreadsbyumi_strategy:
+      type: string
+      description: |
+        Required argument: defines the UMI assignment strategy.
+        Must be chosen among: Identity, Edit, Adjacency, Paired.
+output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - ubam: + type: file + description: unmapped bam file + pattern: '*.bam' + - groupbam: + type: file + description: mapped bam file, where reads are grouped by UMI tag + pattern: '*.bam' + - consensusbam: + type: file + description: | + mapped bam file, where reads are created as consensus of those + belonging to the same UMI group + pattern: '*.bam' +authors: + - '@lescai' diff --git a/tests/modules/fgbio/groupreadsbyumi/main.nf b/tests/modules/fgbio/groupreadsbyumi/main.nf index 1d5fb474..b9bb350a 100644 --- a/tests/modules/fgbio/groupreadsbyumi/main.nf +++ b/tests/modules/fgbio/groupreadsbyumi/main.nf @@ -10,7 +10,6 @@ workflow test_fgbio_groupreadsbyumi { [ id:'test', single_end:false ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) ] - strategy = "Adjacency" - FGBIO_GROUPREADSBYUMI ( input, strategy ) + FGBIO_GROUPREADSBYUMI ( input, 'Adjacency' ) } diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf new file mode 100644 index 00000000..6b02bbc8 --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CREATE_UMI_CONSENSUS } from '../../../../subworkflows/nf-core/fgbio_create_umi_consensus/main' + +workflow test_fgbio_create_umi_consensus_mem1 { + reads = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + read_structure = "+T 12M11S+T" + + CREATE_UMI_CONSENSUS( reads, fasta, read_structure, "Adjacency", "bwa-mem" ) +} + +workflow test_fgbio_create_umi_consensus_mem2 { + reads = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + read_structure = "+T 12M11S+T" + + CREATE_UMI_CONSENSUS( reads, fasta, read_structure, "Adjacency", "bwa-mem2" ) +} diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config new file mode 100644 index 00000000..a55a4213 --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config @@ -0,0 +1,31 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + + withName: BWA_MEM { + ext.args = '-p -C -M' + } + + withName: BWAMEM2_MEM { + ext.args = '-p -C -M' + } + + withName: FGBIO_CALLMOLECULARCONSENSUSREADS { + ext.args = '-M 1 -S Coordinate' + ext.prefix = { "${meta.id}_umiconsensus" } + } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + + withName: SAMBLASTER { + ext.args = '-M --addMateTags' + ext.prefix = { "${meta.id}_processed" } + } + +} diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml 
b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml new file mode 100644 index 00000000..2db70d3f --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml @@ -0,0 +1,22 @@ +- name: fgbio_create_umi_consensus_bwamem1 + command: nextflow run ./tests/subworkflows/nf-core/fgbio_create_umi_consensus -entry test_fgbio_create_umi_consensus_mem1 -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config + tags: + - subworkflows/fgbio_create_umi_consensus + files: + - path: ./output/fastqtobam/test_umi_converted.bam + md5sum: 9510735554e5eff29244077a72075fb6 + - path: ./output/groupreadsbyumi/test_umi-grouped.bam + md5sum: 44f31da850d5a8100b43b629426f2e17 + - path: ./output/callumiconsensus/test_umiconsensus.bam + md5sum: 24b48e3543de0ae7e8a95c116d5ca6a6 +- name: fgbio_create_umi_consensus_bwamem2 + command: nextflow run ./tests/subworkflows/nf-core/fgbio_create_umi_consensus -entry test_fgbio_create_umi_consensus_mem2 -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config + tags: + - subworkflows/fgbio_create_umi_consensus_bwamem2 + files: + - path: ./output/fastqtobam/test_umi_converted.bam + md5sum: 9510735554e5eff29244077a72075fb6 + - path: ./output/groupreadsbyumi/test_umi-grouped.bam + md5sum: c69333155038b9a968fd096627d4dfb0 + - path: ./output/callumiconsensus/test_umiconsensus.bam + md5sum: 24b48e3543de0ae7e8a95c116d5ca6a6 From 1f3f2b18bb11be73b90d3a19e4f764aa88612f91 Mon Sep 17 00:00:00 2001 From: Anan Ibrahim <81744003+Darcy220606@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:17:50 +0100 Subject: [PATCH 297/314] Add new module macrel/contigs (#1109) * Add new module macrel/contigs * removed trailing whitespace * removed whitespace * linting cleanup * Apply suggestions from code review Co-authored-by: James A. Fellows Yates * Updated the test.yml It didnt upload the updated version earlier for some reason :( * Update test.yml * Update test.yml * Update test.yml as generated by pytest * Update test.yml * updated the version issue * Update tests/modules/macrel/contigs/test.yml * Update modules/macrel/contigs/main.nf * Update modules/macrel/contigs/main.nf * Update pytest_modules.yml * Update pytest_modules.yml * Update modules/macrel/contigs/main.nf Co-authored-by: James A. Fellows Yates * Zipped all fasta outputs * Update main.nf * Update test.yml * Update test.yml * Update main.nf * Update main.nf (gzip -n) * Update test.yml * Update main.nf * Update main.nf * Update test.yml * Update tests/modules/macrel/contigs/test.yml * Update modules/macrel/contigs/main.nf * Apply suggestions from code review Co-authored-by: James A. Fellows Yates Co-authored-by: darcy220606 Co-authored-by: James A. 
Fellows Yates --- modules/macrel/contigs/main.nf | 40 +++++++++++++ modules/macrel/contigs/meta.yml | 61 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/macrel/contigs/main.nf | 15 +++++ tests/modules/macrel/contigs/nextflow.config | 5 ++ tests/modules/macrel/contigs/test.yml | 16 +++++ 6 files changed, 141 insertions(+) create mode 100644 modules/macrel/contigs/main.nf create mode 100644 modules/macrel/contigs/meta.yml create mode 100644 tests/modules/macrel/contigs/main.nf create mode 100644 tests/modules/macrel/contigs/nextflow.config create mode 100644 tests/modules/macrel/contigs/test.yml diff --git a/modules/macrel/contigs/main.nf b/modules/macrel/contigs/main.nf new file mode 100644 index 00000000..558ef6e8 --- /dev/null +++ b/modules/macrel/contigs/main.nf @@ -0,0 +1,40 @@ +process MACREL_CONTIGS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::macrel=1.1.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/macrel:1.1.0--py36hc5360cc_0': + 'quay.io/biocontainers/macrel:1.1.0--py36hc5360cc_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*/*.smorfs.faa.gz") , emit: smorfs + tuple val(meta), path("*/*.all_orfs.faa.gz") , emit: all_orfs + tuple val(meta), path("*/*.prediction.gz") , emit: amp_prediction + tuple val(meta), path("*/*.md") , emit: readme_file + tuple val(meta), path("*/*_log.txt") , emit: log_file + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + macrel contigs \\ + $args \\ + --fasta $fasta \\ + --output ${prefix}/ \\ + --tag ${prefix} \\ + --log-file ${prefix}/${prefix}_log.txt \\ + --threads $task.cpus + + gzip --no-name ${prefix}/*.faa + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + macrel: \$(echo \$(macrel --version | sed 's/macrel //g')) + END_VERSIONS + """ +} diff --git a/modules/macrel/contigs/meta.yml b/modules/macrel/contigs/meta.yml new file mode 100644 index 00000000..e0b2fabd --- /dev/null +++ b/modules/macrel/contigs/meta.yml @@ -0,0 +1,61 @@ +name: macrel_contigs +description: A tool that mines antimicrobial peptides (AMPs) from (meta)genomes by predicting peptides from genomes (provided as contigs) and outputs all the predicted anti-microbial peptides found. +keywords: + - AMP + - antimicrobial peptides + - genome mining + - metagenomes + - peptide prediction +tools: + - macrel: + description: A pipeline for AMP (antimicrobial peptide) prediction + homepage: https://macrel.readthedocs.io/en/latest/ + documentation: https://macrel.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/BigDataBiology/macrel + doi: "10.7717/peerj.10555" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: A fasta file with nucleotide sequences. + pattern: "*.{fasta,fa,fna,fasta.gz,fa.gz,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - amp_prediction: + type: file + description: A zipped file, with all predicted amps in a table format. 
+ pattern: "*.prediction.gz" + - smorfs: + type: file + description: A zipped fasta file containing aminoacid sequences showing the general gene prediction information in the contigs. + pattern: "*.smorfs.faa.gz" + - all_orfs: + type: file + description: A zipped fasta file containing amino acid sequences showing the general gene prediction information in the contigs. + pattern: "*.all_orfs.faa.gz" + - readme_file: + type: file + description: A readme file containing tool specific information (e.g. citations, details about the output, etc.). + pattern: "*.md" + - log_file: + type: file + description: A log file containing the information pertaining to the run. + pattern: "*_log.txt" + +authors: + - "@darcy220606" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6dbfc1fd..7601671b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -842,6 +842,10 @@ lofreq/indelqual: - modules/lofreq/indelqual/** - tests/modules/lofreq/indelqual/** +macrel/contigs: + - modules/macrel/contigs/** + - tests/modules/macrel/contigs/** + macs2/callpeak: - modules/macs2/callpeak/** - tests/modules/macs2/callpeak/** diff --git a/tests/modules/macrel/contigs/main.nf b/tests/modules/macrel/contigs/main.nf new file mode 100644 index 00000000..a613dcc4 --- /dev/null +++ b/tests/modules/macrel/contigs/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MACREL_CONTIGS } from '../../../../modules/macrel/contigs/main.nf' + +workflow test_macrel_contigs { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true) + ] + + MACREL_CONTIGS ( input ) +} diff --git a/tests/modules/macrel/contigs/nextflow.config b/tests/modules/macrel/contigs/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/macrel/contigs/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/macrel/contigs/test.yml b/tests/modules/macrel/contigs/test.yml new file mode 100644 index 00000000..af272605 --- /dev/null +++ b/tests/modules/macrel/contigs/test.yml @@ -0,0 +1,16 @@ +- name: macrel contigs test_macrel_contigs + command: nextflow run ./tests/modules/macrel/contigs -entry test_macrel_contigs -c ./tests/config/nextflow.config -c ./tests/modules/macrel/contigs/nextflow.config + tags: + - macrel/contigs + - macrel + files: + - path: output/macrel/test/README.md + md5sum: fa3706dfc95d0538a52c4d0d824be5fb + - path: output/macrel/test/test.all_orfs.faa.gz + - path: output/macrel/test/test.prediction.gz + - path: output/macrel/test/test.smorfs.faa.gz + md5sum: 79704c6120c2f794518301af6f9b963d + - path: output/macrel/test/test_log.txt + md5sum: 6fdba143dce759597eb9f80e5d968729 + - path: output/macrel/versions.yml + md5sum: be8bf0d0647751c635c3736655f29f85 From be8528998134132c6f4823f627bbb0aec6a51747 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Thu, 9 Dec 2021 15:00:32 +0100 Subject: [PATCH 298/314] Update metabat2 output channels and compression (#1111) * feat: each output type has dedicated channel replace bgzip with gzip can only zip one at a time * Add condition moving of unbinned files * fix: solution for moving sometimes non-existant files * fix: update meta.yml to add the new channels * fix: remove most of the checksums due to variability * fix: tweaking of output * Update modules/metabat2/metabat2/main.nf Co-authored-by: Maxime Borry * Fix find commands * Fix find commands Co-authored-by: Maxime Borry --- modules/metabat2/metabat2/main.nf | 15 ++++++++++----- modules/metabat2/metabat2/meta.yml | 13 +++++++++++++ tests/modules/metabat2/metabat2/test.yml | 15 ++++++++------- 3 files changed, 31 insertions(+), 12 deletions(-) diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf index 2d01fdf6..a8af0ae9 100644 --- a/modules/metabat2/metabat2/main.nf +++ b/modules/metabat2/metabat2/main.nf @@ -11,9 +11,12 @@ process METABAT2_METABAT2 { tuple val(meta), path(fasta), path(depth) output: - tuple val(meta), path("bins/*.fa.gz") , optional:true , emit: fasta - tuple val(meta), path("*.tsv.gz"), optional:true , emit: membership - path "versions.yml" , emit: versions + tuple val(meta), path("*.tooShort.fa.gz") , optional:true , emit: tooshort + tuple val(meta), path("*.lowDepth.fa.gz") , optional:true , emit: lowdepth + tuple val(meta), path("*.unbinned.fa.gz") , optional:true , emit: unbinned + tuple val(meta), path("*.tsv.gz") , optional:true , emit: membership + tuple val(meta), path("bins/*.fa.gz") , optional:true , emit: fasta + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' @@ -33,8 +36,10 @@ process METABAT2_METABAT2 { mv metabat2/${prefix} ${prefix}.tsv mv metabat2 bins - bgzip --threads $task.cpus ${prefix}.tsv - bgzip --threads $task.cpus bins/*.fa + + gzip ${prefix}.tsv + find ./bins/ -name "*.fa" -type f | xargs -t -n 1 bgzip -@ ${task.cpus} + find ./bins/ -name "*[lowDepth,tooShort,unbinned].fa.gz" -type f -exec mv {} . \\; cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/metabat2/metabat2/meta.yml b/modules/metabat2/metabat2/meta.yml index a7f3a7ff..0ec07b02 100644 --- a/modules/metabat2/metabat2/meta.yml +++ b/modules/metabat2/metabat2/meta.yml @@ -46,6 +46,18 @@ output: type: file description: Bins created from assembled contigs in fasta file pattern: "*.fa.gz" + - tooshort: + type: file + description: Contigs that did not pass length filtering + pattern: "*.tooShort.fa.gz" + - lowdepth: + type: file + description: Contigs that did not have sufficient depth for binning + pattern: "*.lowDepth.fa.gz" + - unbinned: + type: file + description: Contigs that pass length and depth filtering but could not be binned + pattern: "*.unbinned.fa.gz" - membership: type: file description: cluster memberships as a matrix format. 
@@ -54,3 +66,4 @@ output: authors: - "@maxibor" + - "@jfy133" diff --git a/tests/modules/metabat2/metabat2/test.yml b/tests/modules/metabat2/metabat2/test.yml index 1a8660a7..9389295e 100644 --- a/tests/modules/metabat2/metabat2/test.yml +++ b/tests/modules/metabat2/metabat2/test.yml @@ -1,23 +1,24 @@ - name: metabat2 metabat2 test_metabat2_no_depth - command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config + command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c tests/config/nextflow.config tags: - - metabat2/metabat2 - metabat2 + - metabat2/metabat2 files: - path: output/metabat2/bins/test.1.fa.gz md5sum: 0e9bce5b5a0033fd4411a21dec881170 - path: output/metabat2/test.tsv.gz - md5sum: ea77e8c4426d2337419905b57f1ec335 + - path: output/metabat2/versions.yml + md5sum: 5742a71af36c3a748fd5726d76924ba8 - name: metabat2 metabat2 test_metabat2_depth - command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config + command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c tests/config/nextflow.config tags: - - metabat2/metabat2 - metabat2 + - metabat2/metabat2 files: - path: output/metabat2/bins/test.1.fa.gz md5sum: 0e9bce5b5a0033fd4411a21dec881170 - path: output/metabat2/test.tsv.gz - md5sum: ea77e8c4426d2337419905b57f1ec335 - path: output/metabat2/test.txt.gz - md5sum: 8f735aa408d6c90e5a0310e06ace7a9a + - path: output/metabat2/versions.yml + md5sum: 538c56b2df7d90580f05097218b5d5b1 From 0bf40a26bdc7cd90472956771daebabf52c68b49 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Thu, 9 Dec 2021 11:07:47 -0600 Subject: [PATCH 299/314] Update seqwish to version 0.7.2 (#1144) --- modules/seqwish/induce/main.nf | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index 089f3478..6d6b33e7 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -1,14 +1,14 @@ -def VERSION = '0.7.1' // Version information not provided by tool on CLI +def VERSION = '0.7.2' // Version information not provided by tool on CLI process SEQWISH_INDUCE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::seqwish=0.7.1' : null) + conda (params.enable_conda ? 'bioconda::seqwish=0.7.2' : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/seqwish:0.7.1--h2e03b76_0' : - 'quay.io/biocontainers/seqwish:0.7.1--h2e03b76_0' }" + 'https://depot.galaxyproject.org/singularity/seqwish:0.7.2--h2e03b76_0' : + 'quay.io/biocontainers/seqwish:0.7.2--h2e03b76_0' }" input: tuple val(meta), path(paf), path(fasta) From edbbbbf42097c0696305dd2fd6d0e1070a7dbb07 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 13 Dec 2021 09:13:24 +0100 Subject: [PATCH 300/314] fix: exclude conda/singularity images from pytest workflow output (#1162) --- .github/workflows/pytest-workflow.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 0bd892c8..cc7c9313 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -103,3 +103,5 @@ jobs: /home/runner/pytest_workflow_*/*/log.out /home/runner/pytest_workflow_*/*/log.err /home/runner/pytest_workflow_*/*/work + !/home/runner/pytest_workflow_*/*/work/conda + !/home/runner/pytest_workflow_*/*/work/singularity From 47a9cf8ecbe4de4dcb8b9cc6731fece82b934ab7 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Mon, 13 Dec 2021 14:10:29 +0000 Subject: [PATCH 301/314] Fix transcriptome staging issues on DNAnexus for rsem/prepareference (#1163) --- modules/rsem/preparereference/main.nf | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index 95597b74..a5b8922a 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -12,9 +12,9 @@ process RSEM_PREPAREREFERENCE { path gtf output: - path "rsem" , emit: index - path "rsem/*transcripts.fa", emit: transcript_fasta - path "versions.yml" , emit: versions + path "rsem" , emit: index + path "*transcripts.fa", emit: transcript_fasta + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' @@ -40,6 +40,8 @@ process RSEM_PREPAREREFERENCE { $fasta \\ rsem/genome + cp rsem/genome.transcripts.fa . + cat <<-END_VERSIONS > versions.yml "${task.process}": rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") @@ -55,6 +57,8 @@ process RSEM_PREPAREREFERENCE { $fasta \\ rsem/genome + cp rsem/genome.transcripts.fa . 
+ cat <<-END_VERSIONS > versions.yml "${task.process}": rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") From 0fafaeebf52cc5ab554b83297ed02a48d852a848 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Mon, 13 Dec 2021 16:15:20 +0000 Subject: [PATCH 302/314] Revert PR #1147 (#1166) * Revert PR #1147 * Fix md5sum for crai file --- modules/samtools/index/main.nf | 11 ++++------- tests/modules/samtools/index/test.yml | 2 +- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index 74ae15df..db025a8f 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -11,13 +11,10 @@ process SAMTOOLS_INDEX { tuple val(meta), path(input) output: - tuple val(meta), path("*.bam" , includeInputs:true), path("*.bai") , optional:true, emit: bam_bai - tuple val(meta), path("*.bai") , optional:true, emit: bai - tuple val(meta), path("*.bam" , includeInputs:true), path("*.csi") , optional:true, emit: bam_csi - tuple val(meta), path("*.csi") , optional:true, emit: csi - tuple val(meta), path("*.cram", includeInputs:true), path("*.crai"), optional:true, emit: cram_crai - tuple val(meta), path("*.crai") , optional:true, emit: crai - path "versions.yml" , emit: versions + tuple val(meta), path("*.bai") , optional:true, emit: bai + tuple val(meta), path("*.csi") , optional:true, emit: csi + tuple val(meta), path("*.crai"), optional:true, emit: crai + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 6972ed65..7184be8f 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -14,7 +14,7 @@ - samtools/index files: - path: output/samtools/test.paired_end.recalibrated.sorted.cram.crai - md5sum: 537e3d8c937bcc4e34e1cf47cd71d484 + md5sum: 14bc3bd5c89cacc8f4541f9062429029 - name: samtools index test_samtools_index_csi command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_csi -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config From 826a5603db5cf5b4f1e55cef9cc0b7c37d3c7e70 Mon Sep 17 00:00:00 2001 From: Mingda Jin Date: Mon, 13 Dec 2021 11:18:27 -0800 Subject: [PATCH 303/314] Stage fastq for concat in subfolders to avoid name collision issue (#1107) * Stage fastq for concat in subfolders in task workdir * Update main.nf * Update test.yml Co-authored-by: Harshil Patel --- modules/cat/fastq/main.nf | 8 ++++---- tests/modules/cat/fastq/main.nf | 22 ++++++++++++++++++++++ tests/modules/cat/fastq/test.yml | 26 +++++++++++++++++++++++--- 3 files changed, 49 insertions(+), 7 deletions(-) diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index c5ece83a..d02598e1 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -8,7 +8,7 @@ process CAT_FASTQ { 'biocontainers/biocontainers:v1.2.0_cv1' }" input: - tuple val(meta), path(reads) + tuple val(meta), path(reads, stageAs: "input*/*") output: tuple val(meta), path("*.merged.fastq.gz"), emit: reads @@ -21,7 +21,7 @@ process CAT_FASTQ { if (meta.single_end) { if (readList.size > 1) { """ - cat ${readList.sort().join(' ')} > ${prefix}.merged.fastq.gz + cat ${readList.join(' ')} > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -35,8 +35,8 @@ process CAT_FASTQ { def read2 = [] readList.eachWithIndex{ v, ix -> ( ix & 1 ? 
read2 : read1 ) << v } """ - cat ${read1.sort().join(' ')} > ${prefix}_1.merged.fastq.gz - cat ${read2.sort().join(' ')} > ${prefix}_2.merged.fastq.gz + cat ${read1.join(' ')} > ${prefix}_1.merged.fastq.gz + cat ${read2.join(' ')} > ${prefix}_2.merged.fastq.gz cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/modules/cat/fastq/main.nf b/tests/modules/cat/fastq/main.nf index c3da91d2..1ed23ce5 100644 --- a/tests/modules/cat/fastq/main.nf +++ b/tests/modules/cat/fastq/main.nf @@ -25,3 +25,25 @@ workflow test_cat_fastq_paired_end { CAT_FASTQ ( input ) } + +workflow test_cat_fastq_single_end_same_name { + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + CAT_FASTQ ( input ) +} + +workflow test_cat_fastq_paired_end_same_name { + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + + CAT_FASTQ ( input ) +} diff --git a/tests/modules/cat/fastq/test.yml b/tests/modules/cat/fastq/test.yml index 89ddf331..56374060 100644 --- a/tests/modules/cat/fastq/test.yml +++ b/tests/modules/cat/fastq/test.yml @@ -5,7 +5,7 @@ - cat/fastq files: - path: ./output/cat/test.merged.fastq.gz - md5sum: 59f6dbe193741bb40f498f254aeb2e99 + md5sum: f9cf5e375f7de81a406144a2c70cc64d - name: cat fastq fastqc_paired_end command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config @@ -14,6 +14,26 @@ - cat/fastq files: - path: ./output/cat/test_2.merged.fastq.gz - md5sum: d2b1a836eef1058738ecab36c907c5ba + md5sum: 77c8e966e130d8c6b6ec9be52fcb2bda - path: ./output/cat/test_1.merged.fastq.gz - md5sum: 59f6dbe193741bb40f498f254aeb2e99 + md5sum: f9cf5e375f7de81a406144a2c70cc64d + +- name: cat fastq single-end-same-name + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end_same_name -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config + tags: + - cat + - cat/fastq + files: + - path: ./output/cat/test.merged.fastq.gz + md5sum: 63f817db7a29a03eb538104495556f66 + +- name: cat fastq fastqc_paired_end_same_name + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end_same_name -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config + tags: + - cat + - cat/fastq + files: + - path: ./output/cat/test_1.merged.fastq.gz + md5sum: 63f817db7a29a03eb538104495556f66 + - path: ./output/cat/test_2.merged.fastq.gz + md5sum: fe9f266f43a6fc3dcab690a18419a56e From 4e5406c221e91ede93d9d8036110da62c86cca66 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 14 Dec 2021 12:58:25 +0100 Subject: [PATCH 304/314] Add helper script to find duplicate test YAML md5s (#1167) * Add helper script to find duplicate test YAML md5s * Count how many modules the duplicates affect --- .github/check_duplicate_md5s.py | 82 +++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 .github/check_duplicate_md5s.py diff --git a/.github/check_duplicate_md5s.py 
b/.github/check_duplicate_md5s.py new file mode 100644 index 00000000..60506ab5 --- /dev/null +++ b/.github/check_duplicate_md5s.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +from rich import print +from rich.table import Table +import click +import glob +import os +import yaml + + +@click.command() +@click.option( + "--min_dups", + default=5, + show_default=True, + help="Minimum number of duplicates to report", +) +@click.option( + "--search_dir", + default=f"{os.path.dirname(__file__)}/../tests/**/test.yml", + show_default=True, + help="Glob directory pattern used to find test YAML files", +) +def find_duplicate_md5s(min_dups, search_dir): + """ + Find duplicate file MD5 sums in test YAML files. + """ + md5_filenames = {} + md5_output_fn_counts = {} + module_counts = {} + + # Loop through all files in tests/ called test.yml + for test_yml in glob.glob(search_dir, recursive=True): + # Open file and parse YAML + with open(test_yml, "r") as fh: + test_config = yaml.safe_load(fh) + # Loop through tests and check for duplicate md5s + for test in test_config: + for test_file in test.get("files", []): + if "md5sum" in test_file: + md5 = test_file["md5sum"] + md5_filenames[md5] = md5_filenames.get(md5, []) + [ + os.path.basename(test_file.get("path")) + ] + md5_output_fn_counts[md5] = md5_output_fn_counts.get(md5, 0) + 1 + # Log the module that this md5 was in + modname = os.path.basename(os.path.dirname(test_yml)) + # If tool/subtool show the whole thing + # Ugly code but trying to stat os-agnostic + if os.path.basename( + os.path.dirname(os.path.dirname(test_yml)) + ) not in ["modules", "config", "subworkflows"]: + modname = "{}/{}".format( + os.path.basename( + os.path.dirname(os.path.dirname(test_yml)) + ), + os.path.basename(os.path.dirname(test_yml)), + ) + module_counts[md5] = module_counts.get(md5, []) + [modname] + + # Set up rich table + table = Table(title="Duplicate MD5s", row_styles=["dim", ""]) + table.add_column("MD5", style="cyan", no_wrap=True) + table.add_column("Count", style="magenta", justify="right") + table.add_column("Num modules", style="blue", justify="right") + table.add_column("Filenames", style="green") + + # Add rows - sort md5_output_fn_counts by value + for md5 in sorted(md5_output_fn_counts, key=md5_output_fn_counts.get): + if md5_output_fn_counts[md5] >= min_dups: + table.add_row( + md5, + str(md5_output_fn_counts[md5]), + str(len(set(module_counts[md5]))), + ", ".join(set(md5_filenames[md5])), + ) + + print(table) + + +if __name__ == "__main__": + find_duplicate_md5s() From 61376425fb7996d38ae9ac78dfd0dafd4ac7b8a5 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Tue, 14 Dec 2021 16:10:08 +0000 Subject: [PATCH 305/314] BugFix: add missing tbi output channel to genotypeGVCFs (#1153) * initial commit to setup branch * workflow finished * Update nextflow.config * tumour to tumor, getpileup passed as nomral and tumor * paired_somatic renamed to tumor_normal_somatic * Apply suggestions from code review Co-authored-by: Maxime U. Garcia * Update subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf Co-authored-by: Maxime U. Garcia * updated index names in meta.yml * changed index file names in main script and test * Apply suggestions from code review Co-authored-by: Maxime U. 
Garcia * Apply suggestions from code review * fixed bug from changes * Apply suggestions from code review * modified yml to allow new subworkflow testing * Update test.yml * Update test.yml * added output channel for tbi files, tweaked method of adding blank inputs for gendb tests * Update main.nf * Update main.nf Co-authored-by: GCJMackenzie Co-authored-by: Maxime U. Garcia --- modules/gatk4/genotypegvcfs/main.nf | 7 +-- modules/gatk4/genotypegvcfs/meta.yml | 4 ++ tests/config/test_data.config | 2 +- tests/modules/gatk4/genotypegvcfs/main.nf | 56 ++++++++-------------- tests/modules/gatk4/genotypegvcfs/test.yml | 9 ++++ 5 files changed, 38 insertions(+), 40 deletions(-) diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf index 1a772860..50a6e188 100644 --- a/modules/gatk4/genotypegvcfs/main.nf +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -2,10 +2,10 @@ process GATK4_GENOTYPEGVCFS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0' : - 'quay.io/biocontainers/gatk4:4.2.0.0--0' }" + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(gvcf), path(gvcf_index) @@ -18,6 +18,7 @@ process GATK4_GENOTYPEGVCFS { output: tuple val(meta), path("*.vcf.gz"), emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi path "versions.yml" , emit: versions script: diff --git a/modules/gatk4/genotypegvcfs/meta.yml b/modules/gatk4/genotypegvcfs/meta.yml index cd7457a7..e6b38863 100644 --- a/modules/gatk4/genotypegvcfs/meta.yml +++ b/modules/gatk4/genotypegvcfs/meta.yml @@ -60,6 +60,10 @@ output: type: file description: Genotyped VCF file pattern: "*.vcf.gz" + - tbi: + type: file + description: Tbi index for VCF file + pattern: "*.vcf.gz" - versions: type: file description: File containing software versions diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 31e17618..b2edcb6f 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -193,9 +193,9 @@ params { test2_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.baserecalibrator.table" test_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test.pileups.table" test2_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.pileups.table" - test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" + test_pon_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_pon_genomicsdb.tar.gz" test_test2_paired_mutect2_calls_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz" test_test2_paired_mutect2_calls_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.tbi" diff --git a/tests/modules/gatk4/genotypegvcfs/main.nf b/tests/modules/gatk4/genotypegvcfs/main.nf index 208faf8b..ce13ea2d 100644 --- a/tests/modules/gatk4/genotypegvcfs/main.nf +++ b/tests/modules/gatk4/genotypegvcfs/main.nf @@ -7,7 +7,7 @@ include { 
UNTAR } from '../../../../modules/untar/main.nf' // Basic parameters with uncompressed VCF input workflow test_gatk4_genotypegvcfs_vcf_input { - + input = [ [ id:'test' ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) ] @@ -21,7 +21,7 @@ workflow test_gatk4_genotypegvcfs_vcf_input { // Basic parameters with compressed VCF input workflow test_gatk4_genotypegvcfs_gz_input { - + input = [ [ id:'test' ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] @@ -35,7 +35,7 @@ workflow test_gatk4_genotypegvcfs_gz_input { // Basic parameters + optional dbSNP workflow test_gatk4_genotypegvcfs_gz_input_dbsnp { - + input = [ [ id:'test' ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] @@ -52,7 +52,7 @@ workflow test_gatk4_genotypegvcfs_gz_input_dbsnp { // Basic parameters + optional intervals workflow test_gatk4_genotypegvcfs_gz_input_intervals { - + input = [ [ id:'test' ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] @@ -68,7 +68,7 @@ workflow test_gatk4_genotypegvcfs_gz_input_intervals { // Basic parameters + optional dbSNP + optional intervals workflow test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals { - + input = [ [ id:'test' ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] @@ -95,20 +95,16 @@ workflow test_gatk4_genotypegvcfs_gendb_input { test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) UNTAR ( test_genomicsdb ) - - Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} - Channel - .of([ id:'test' ]) - .combine(UNTAR.out.untar) - .combine(mock_gvcf_index) - .set{ input } + gendb = UNTAR.out.untar.collect() + gendb.add([]) + input = Channel.of([ id:'test' ]).combine(gendb) GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], [] ) } // Basic parameters with GenomicsDB + optional dbSNP workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp { - + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) @@ -119,20 +115,16 @@ workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp { test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) UNTAR ( test_genomicsdb ) - - Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} - Channel - .of([ id:'test' ]) - .combine(UNTAR.out.untar) - .combine(mock_gvcf_index) - .set{ input } + gendb = UNTAR.out.untar.collect() + gendb.add([]) + input = Channel.of([ id:'test' ]).combine(gendb) GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, [] ) } // Basic parameters with GenomicsDB + 
optional intervals workflow test_gatk4_genotypegvcfs_gendb_input_intervals { - + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) @@ -142,20 +134,16 @@ workflow test_gatk4_genotypegvcfs_gendb_input_intervals { test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) UNTAR ( test_genomicsdb ) - - Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} - Channel - .of([ id:'test' ]) - .combine(UNTAR.out.untar) - .combine(mock_gvcf_index) - .set{ input } + gendb = UNTAR.out.untar.collect() + gendb.add([]) + input = Channel.of([ id:'test' ]).combine(gendb) GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], intervalsBed ) } // Basic parameters with GenomicsDB + optional dbSNP + optional intervals workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals { - + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) @@ -168,13 +156,9 @@ workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals { test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) UNTAR ( test_genomicsdb ) - - Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} - Channel - .of([ id:'test' ]) - .combine(UNTAR.out.untar) - .combine(mock_gvcf_index) - .set{ input } + gendb = UNTAR.out.untar.collect() + gendb.add([]) + input = Channel.of([ id:'test' ]).combine(gendb) GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, intervalsBed ) } diff --git a/tests/modules/gatk4/genotypegvcfs/test.yml b/tests/modules/gatk4/genotypegvcfs/test.yml index 45201af2..891bc365 100644 --- a/tests/modules/gatk4/genotypegvcfs/test.yml +++ b/tests/modules/gatk4/genotypegvcfs/test.yml @@ -6,6 +6,7 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -15,6 +16,7 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -24,6 +26,7 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: 
output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_intervals command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -33,6 +36,7 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -42,6 +46,7 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -51,6 +56,7 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -60,6 +66,7 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_intervals command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -69,6 +76,7 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -78,3 +86,4 @@ files: - path: output/gatk4/test.genotyped.vcf.gz contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + - path: output/gatk4/test.genotyped.vcf.gz.tbi From ae9215976253e0c5324c5db11f9d5bc29ae635da Mon Sep 17 00:00:00 2001 From: Simon Pearce <24893913+SPPearce@users.noreply.github.com> Date: Wed, 15 Dec 2021 15:57:42 
+0000 Subject: [PATCH 306/314] New subtool: hmmcopy/generatemap (#1168) * Initial structure * Working with local singularity image * Working generateMap.pl script * Remote not working bioconda * Working generateMap with biocontainer * Lint changes * Updated hmmcopy container version to be consistent * Fix failing test * Remove path to perl * No hardpath to script * Update main.nf Moved version outside of process, add support for zipped fasta file * Revert to not allowing gzip via pipe, as perl script can't cope Co-authored-by: Simon Pearce --- modules/hmmcopy/gccounter/main.nf | 4 +- modules/hmmcopy/generatemap/main.nf | 38 +++++++++++++++++++ modules/hmmcopy/generatemap/meta.yml | 32 ++++++++++++++++ modules/hmmcopy/readcounter/main.nf | 4 +- tests/config/pytest_modules.yml | 4 ++ tests/modules/hmmcopy/generatemap/main.nf | 12 ++++++ .../hmmcopy/generatemap/nextflow.config | 5 +++ tests/modules/hmmcopy/generatemap/test.yml | 10 +++++ tests/modules/hmmcopy/readcounter/test.yml | 6 ++- 9 files changed, 109 insertions(+), 6 deletions(-) create mode 100644 modules/hmmcopy/generatemap/main.nf create mode 100644 modules/hmmcopy/generatemap/meta.yml create mode 100644 tests/modules/hmmcopy/generatemap/main.nf create mode 100644 tests/modules/hmmcopy/generatemap/nextflow.config create mode 100644 tests/modules/hmmcopy/generatemap/test.yml diff --git a/modules/hmmcopy/gccounter/main.nf b/modules/hmmcopy/gccounter/main.nf index 36666095..a1de8b97 100644 --- a/modules/hmmcopy/gccounter/main.nf +++ b/modules/hmmcopy/gccounter/main.nf @@ -5,8 +5,8 @@ process HMMCOPY_GCCOUNTER { conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5' : - 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5' }" + 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_7' : + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_7' }" input: path fasta diff --git a/modules/hmmcopy/generatemap/main.nf b/modules/hmmcopy/generatemap/main.nf new file mode 100644 index 00000000..bedbffdb --- /dev/null +++ b/modules/hmmcopy/generatemap/main.nf @@ -0,0 +1,38 @@ +def VERSION = '0.1.1' + +process HMMCOPY_GENERATEMAP { + tag '$bam' + label 'process_long' + + conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_7': + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_7' }" + + input: + path fasta + + output: + path "*.map.bw" , emit: bigwig + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + + """ + # build required indexes + generateMap.pl -b \\ + $args \\ + $fasta + + # run + generateMap.pl \\ + $args \\ + $fasta + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + hmmcopy: \$(echo $VERSION) + END_VERSIONS + """ +} diff --git a/modules/hmmcopy/generatemap/meta.yml b/modules/hmmcopy/generatemap/meta.yml new file mode 100644 index 00000000..ca43c6ce --- /dev/null +++ b/modules/hmmcopy/generatemap/meta.yml @@ -0,0 +1,32 @@ +name: hmmcopy_generatemap +description: Perl script (generateMap.pl) generates the mappability of a genome given a certain size of reads, for input to hmmcopy mapcounter. Takes a very long time on large genomes, is not parallelised at all. 
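A minimal wiring sketch for the new HMMCOPY_GENERATEMAP module (hedged: the include path, params names and the view() call are illustrative, not part of this patch; the FASTA must be uncompressed, since the Perl script cannot read gzipped input via a pipe, per the commit message):

    nextflow.enable.dsl = 2

    include { HMMCOPY_GENERATEMAP } from './modules/hmmcopy/generatemap/main.nf'

    workflow {
        // plain (not gzipped) genome FASTA
        fasta = file(params.fasta, checkIfExists: true)

        HMMCOPY_GENERATEMAP ( fasta )

        // '*.map.bw' mappability track, intended as input for hmmcopy mapcounter
        HMMCOPY_GENERATEMAP.out.bigwig.view()
    }
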
+keywords: + - hmmcopy + - mapcounter + - mappability +tools: + - hmmcopy: + description: C++ based programs for analyzing BAM files and preparing read counts -- used with bioconductor-hmmcopy + homepage: https://github.com/shahcompbio/hmmcopy_utils + documentation: https://github.com/shahcompbio/hmmcopy_utils + tool_dev_url: https://github.com/shahcompbio/hmmcopy_utils + doi: "" + licence: ['GPL v3'] + +input: + - fasta: + type: file + description: Input genome fasta file + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bigwig: + type: file + description: bigwig file containing the mappability of the genome + pattern: "*.{map.bw}" + +authors: + - "@sppearce" diff --git a/modules/hmmcopy/readcounter/main.nf b/modules/hmmcopy/readcounter/main.nf index 6399b1a2..a6e89f91 100644 --- a/modules/hmmcopy/readcounter/main.nf +++ b/modules/hmmcopy/readcounter/main.nf @@ -6,8 +6,8 @@ process HMMCOPY_READCOUNTER { conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5' : - 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5' }" + 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_7' : + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_7' }" input: tuple val(meta), path(bam), path(bai) diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7601671b..74719a30 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -685,6 +685,10 @@ hmmcopy/gccounter: - modules/hmmcopy/gccounter/** - tests/modules/hmmcopy/gccounter/** +hmmcopy/generatemap: + - modules/hmmcopy/generatemap/** + - tests/modules/hmmcopy/generatemap/** + hmmcopy/readcounter: - modules/hmmcopy/readcounter/** - tests/modules/hmmcopy/readcounter/** diff --git a/tests/modules/hmmcopy/generatemap/main.nf b/tests/modules/hmmcopy/generatemap/main.nf new file mode 100644 index 00000000..381420d3 --- /dev/null +++ b/tests/modules/hmmcopy/generatemap/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HMMCOPY_GENERATEMAP } from '../../../../modules/hmmcopy/generatemap/main.nf' + +workflow test_hmmcopy_generatemap { + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + HMMCOPY_GENERATEMAP ( fasta ) +} diff --git a/tests/modules/hmmcopy/generatemap/nextflow.config b/tests/modules/hmmcopy/generatemap/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/hmmcopy/generatemap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/hmmcopy/generatemap/test.yml b/tests/modules/hmmcopy/generatemap/test.yml new file mode 100644 index 00000000..274e5d0f --- /dev/null +++ b/tests/modules/hmmcopy/generatemap/test.yml @@ -0,0 +1,10 @@ +- name: hmmcopy generatemap test_hmmcopy_generatemap + command: nextflow run tests/modules/hmmcopy/generatemap -entry test_hmmcopy_generatemap -c tests/config/nextflow.config + tags: + - hmmcopy + - hmmcopy/generatemap + files: + - path: output/hmmcopy/genome.fasta.map.bw + md5sum: 7ad68224a1e40287978284c387e8eb70 + - path: output/hmmcopy/versions.yml + md5sum: f950580f94d8a2d88332c477972cb9f0 diff --git 
a/tests/modules/hmmcopy/readcounter/test.yml b/tests/modules/hmmcopy/readcounter/test.yml index a7e84f35..e13b0b8d 100644 --- a/tests/modules/hmmcopy/readcounter/test.yml +++ b/tests/modules/hmmcopy/readcounter/test.yml @@ -1,8 +1,10 @@ - name: hmmcopy readcounter test_hmmcopy_readcounter - command: nextflow run ./tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c ./tests/config/nextflow.config -c ./tests/modules/hmmcopy/readcounter/nextflow.config + command: nextflow run tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c tests/config/nextflow.config tags: - hmmcopy - hmmcopy/readcounter files: - path: output/hmmcopy/test.wig - md5sum: 3655d8325baea81b3b690791262c6b57 + md5sum: 4682778422b9a2510a3cb70bd13ccd08 + - path: output/hmmcopy/versions.yml + md5sum: 624a85e6a1bc61abc33cac03aea33a1e From 67571c4e791b48ceb5507eeb464d6e06599a6f96 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Wed, 15 Dec 2021 21:26:43 +0000 Subject: [PATCH 307/314] Add RSeqC tin.py module (#1174) * Add RSeqC tin.py module * Fix EC lint for unrelated hmmcopy module * Remove md5sum for empty file --- modules/hmmcopy/generatemap/main.nf | 2 +- modules/rseqc/tin/main.nf | 33 ++++++++ modules/rseqc/tin/meta.yml | 40 ++++++++++ tests/config/pytest_modules.yml | 101 ++++++------------------ tests/modules/rseqc/tin/main.nf | 17 ++++ tests/modules/rseqc/tin/nextflow.config | 5 ++ tests/modules/rseqc/tin/test.yml | 7 ++ 7 files changed, 128 insertions(+), 77 deletions(-) create mode 100644 modules/rseqc/tin/main.nf create mode 100644 modules/rseqc/tin/meta.yml create mode 100644 tests/modules/rseqc/tin/main.nf create mode 100644 tests/modules/rseqc/tin/nextflow.config create mode 100644 tests/modules/rseqc/tin/test.yml diff --git a/modules/hmmcopy/generatemap/main.nf b/modules/hmmcopy/generatemap/main.nf index bedbffdb..1d248853 100644 --- a/modules/hmmcopy/generatemap/main.nf +++ b/modules/hmmcopy/generatemap/main.nf @@ -1,5 +1,5 @@ def VERSION = '0.1.1' - + process HMMCOPY_GENERATEMAP { tag '$bam' label 'process_long' diff --git a/modules/rseqc/tin/main.nf b/modules/rseqc/tin/main.nf new file mode 100644 index 00000000..4c449973 --- /dev/null +++ b/modules/rseqc/tin/main.nf @@ -0,0 +1,33 @@ +process RSEQC_TIN { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" + + input: + tuple val(meta), path(bam) + path bed + + output: + tuple val(meta), path("*.txt"), emit: txt + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + tin.py \\ + -i $bam \\ + -r $bed \\ + $args \\ + > ${prefix}.tin.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + rseqc: \$(tin.py --version | sed -e "s/tin.py //g") + END_VERSIONS + """ +} diff --git a/modules/rseqc/tin/meta.yml b/modules/rseqc/tin/meta.yml new file mode 100644 index 00000000..2c711f30 --- /dev/null +++ b/modules/rseqc/tin/meta.yml @@ -0,0 +1,40 @@ +name: rseqc_tin +description: Calculte TIN (transcript integrity number) from RNA-seq reads +keywords: + - rnaseq + - transcript + - integrity +tools: + - rseqc: + description: | + RSeQC package provides a number of useful modules that can comprehensively evaluate + high throughput sequence data especially RNA-seq data. + homepage: http://rseqc.sourceforge.net/ + documentation: http://rseqc.sourceforge.net/ + doi: 10.1093/bioinformatics/bts356 + licence: ['GPL-3.0-or-later'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Input BAM file + pattern: "*.{bam}" + - bed: + type: file + description: BED file containing the reference gene model + pattern: "*.{bed}" +output: + - txt: + type: file + description: tin.py results file + pattern: "*.tin.txt" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@drpatelh" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 74719a30..cc4356a7 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -270,21 +270,19 @@ cat/fastq: - modules/cat/fastq/** - tests/modules/cat/fastq/** -cellranger/gtf: # &cellranger/gtf - - modules/cellranger/gtf/** - - tests/modules/cellranger/gtf/** - -cellranger/mkref: # &cellranger/mkref - - modules/cellranger/mkref/** - - tests/modules/cellranger/mkref/** - # - *cellranger/gtf - - modules/cellranger/gtf/** - - tests/modules/cellranger/gtf/** - cellranger/count: - modules/cellranger/count/** - tests/modules/cellranger/count/** - # - *cellranger/mkref + - modules/cellranger/mkref/** + - tests/modules/cellranger/mkref/** + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** + +cellranger/gtf: + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** + +cellranger/mkref: - modules/cellranger/mkref/** - tests/modules/cellranger/mkref/** - modules/cellranger/gtf/** @@ -514,7 +512,7 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** -gatk4/calculatecontamination: #&gatk4_calculatecontamination +gatk4/calculatecontamination: - modules/gatk4/calculatecontamination/** - tests/modules/gatk4/calculatecontamination/** @@ -522,7 +520,7 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** -gatk4/createsomaticpanelofnormals: #&gatk4_createsomaticpanelofnormals +gatk4/createsomaticpanelofnormals: - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** @@ -534,7 +532,7 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** 
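For the RSEQC_TIN module added above, a hedged wiring sketch (params names are placeholders; the BAM is sorted, as in the module test, and the BED file is the reference gene model described in the meta.yml):

    nextflow.enable.dsl = 2

    include { RSEQC_TIN } from './modules/rseqc/tin/main.nf'

    workflow {
        input = [
            [ id:'sample1' ],                             // meta map
            file(params.bam, checkIfExists: true)         // sorted BAM
        ]
        bed = file(params.gene_bed, checkIfExists: true)  // reference gene model (BED)

        RSEQC_TIN ( input, bed )

        RSEQC_TIN.out.txt.view()                          // per-sample *.tin.txt
    }
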
-gatk4/filtermutectcalls: #&gatk4_filtermutectcalls +gatk4/filtermutectcalls: - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** @@ -542,7 +540,7 @@ gatk4/gatherbqsrreports: - modules/gatk4/gatherbqsrreports/** - tests/modules/gatk4/gatherbqsrreports/** -gatk4/genomicsdbimport: #&gatk4_genomicsdbimport +gatk4/genomicsdbimport: - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** @@ -550,7 +548,7 @@ gatk4/genotypegvcfs: - modules/gatk4/genotypegvcfs/** - tests/modules/gatk4/genotypegvcfs/** -gatk4/getpileupsummaries: #&gatk4_getpileupsummaries +gatk4/getpileupsummaries: - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -566,7 +564,7 @@ gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** -gatk4/learnreadorientationmodel: #&gatk4_learnreadorientationmodel +gatk4/learnreadorientationmodel: - modules/gatk4/learnreadorientationmodel/** - tests/modules/gatk4/learnreadorientationmodel/** @@ -582,7 +580,7 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** -gatk4/mutect2: #&gatk4_mutect2 +gatk4/mutect2: - modules/gatk4/mutect2/** - tests/modules/gatk4/mutect2/** @@ -1207,6 +1205,10 @@ rseqc/readduplication: - modules/rseqc/readduplication/** - tests/modules/rseqc/readduplication/** +rseqc/tin: + - modules/rseqc/tin/** + - tests/modules/rseqc/tin/** + salmon/index: - modules/salmon/index/** - tests/modules/salmon/index/** @@ -1251,7 +1253,7 @@ samtools/idxstats: - modules/samtools/idxstats/** - tests/modules/samtools/idxstats/** -samtools/index: #&samtools_index +samtools/index: - modules/samtools/index/** - tests/modules/samtools/index/** @@ -1263,7 +1265,7 @@ samtools/mpileup: - modules/samtools/mpileup/** - tests/modules/samtools/mpileup/** -samtools/sort: #&samtools_sort +samtools/sort: - modules/samtools/sort/** - tests/modules/samtools/sort/** @@ -1339,11 +1341,11 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/fasterqdump: #&sratools_fasterqdump +sratools/fasterqdump: - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** -sratools/prefetch: #&sratools_prefetch +sratools/prefetch: - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** @@ -1462,56 +1464,3 @@ yara/index: yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** - -# subworkflows/align_bowtie2: -# - subworkflows/nf-core/align_bowtie2/** -# - tests/subworkflows/nf-core/align_bowtie2/** -# - *subworkflows_bam_sort_samtools - -# subworkflows/annotation_ensemblvep: &subworkflows_annotation_ensemblvep -# - subworkflows/nf-core/annotation_ensemblvep/** -# - tests/subworkflows/nf-core/annotation_ensemblvep/** - -# subworkflows/annotation_snpeff: &subworkflows_annotation_snpeff -# - subworkflows/nf-core/annotation_snpeff/** -# - tests/subworkflows/nf-core/annotation_snpeff/** - -# subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools -# - subworkflows/nf-core/bam_stats_samtools/** -# - tests/subworkflows/nf-core/bam_stats_samtools/** - -# subworkflows/bam_sort_samtools: &subworkflows_bam_sort_samtools -# - subworkflows/nf-core/bam_sort_samtools/** -# - tests/subworkflows/nf-core/bam_sort_samtools/** -# - *samtools_sort -# - *samtools_index -# - *subworkflows_bam_stats_samtools - -# subworkflows/gatk_create_som_pon: -# - subworkflows/nf-core/gatk_create_som_pon/** -# - tests/subworkflows/nf-core/gatk_create_som_pon/** -# - *gatk4_genomicsdbimport -# - *gatk4_createsomaticpanelofnormals - 
-# subworkflows/gatk_tumor_normal_somatic_variant_calling: -# - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** -# - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** -# - *gatk4_mutect2 -# - *gatk4_learnreadorientationmodel -# - *gatk4_getpileupsummaries -# - *gatk4_calculatecontamination -# - *gatk4_filtermutectcalls - -# subworkflows/gatk_tumor_only_somatic_variant_calling: -# - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** -# - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** -# - *gatk4_mutect2 -# - *gatk4_getpileupsummaries -# - *gatk4_calculatecontamination -# - *gatk4_filtermutectcalls - -# subworkflows/sra_fastq: -# - subworkflows/nf-core/sra_fastq/** -# - tests/subworkflows/nf-core/sra_fastq/** -# - *sratools_fasterqdump -# - *sratools_prefetch diff --git a/tests/modules/rseqc/tin/main.nf b/tests/modules/rseqc/tin/main.nf new file mode 100644 index 00000000..9b987a8e --- /dev/null +++ b/tests/modules/rseqc/tin/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { RSEQC_TIN } from '../../../../modules/rseqc/tin/main.nf' + +workflow test_rseqc_tin { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + + bed = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + + RSEQC_TIN ( input, bed ) +} diff --git a/tests/modules/rseqc/tin/nextflow.config b/tests/modules/rseqc/tin/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/rseqc/tin/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/rseqc/tin/test.yml b/tests/modules/rseqc/tin/test.yml new file mode 100644 index 00000000..f99ce7cc --- /dev/null +++ b/tests/modules/rseqc/tin/test.yml @@ -0,0 +1,7 @@ +- name: rseqc tin + command: nextflow run ./tests/modules/rseqc/tin -entry test_rseqc_tin -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/tin/nextflow.config + tags: + - rseqc + - rseqc/tin + files: + - path: output/rseqc/test.tin.txt From 9f8d9fb615bdb45a164e9dd59bb35043b99f5ae0 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Thu, 16 Dec 2021 08:44:50 +0000 Subject: [PATCH 308/314] Add applyvqsr (#1101) * initial commit to setup branch * workflow finished * Update nextflow.config * tumour to tumor, getpileup passed as nomral and tumor * paired_somatic renamed to tumor_normal_somatic * Apply suggestions from code review Co-authored-by: Maxime U. Garcia * Update subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf Co-authored-by: Maxime U. Garcia * updated index names in meta.yml * changed index file names in main script and test * Apply suggestions from code review Co-authored-by: Maxime U. Garcia * Apply suggestions from code review * fixed bug from changes * Apply suggestions from code review * modified yml to allow new subworkflow testing * Update test.yml * Update test.yml * add applyvqsr * added memory options, new test data used * Update main.nf * Update main.nf Co-authored-by: GCJMackenzie Co-authored-by: Maxime U. 
Garcia --- modules/gatk4/applyvqsr/main.nf | 55 ++++++++++++ modules/gatk4/applyvqsr/meta.yml | 88 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/config/test_data.config | 21 +++++ tests/modules/gatk4/applyvqsr/main.nf | 41 +++++++++ tests/modules/gatk4/applyvqsr/nextflow.config | 5 ++ tests/modules/gatk4/applyvqsr/test.yml | 17 ++++ 7 files changed, 231 insertions(+) create mode 100644 modules/gatk4/applyvqsr/main.nf create mode 100644 modules/gatk4/applyvqsr/meta.yml create mode 100644 tests/modules/gatk4/applyvqsr/main.nf create mode 100644 tests/modules/gatk4/applyvqsr/nextflow.config create mode 100644 tests/modules/gatk4/applyvqsr/test.yml diff --git a/modules/gatk4/applyvqsr/main.nf b/modules/gatk4/applyvqsr/main.nf new file mode 100644 index 00000000..89f79f42 --- /dev/null +++ b/modules/gatk4/applyvqsr/main.nf @@ -0,0 +1,55 @@ +process GATK4_APPLYVQSR { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" + + input: + tuple val(meta), path(vcf), path(tbi), path(recal), path(recalidx), path(tranches) + path fasta + path fai + path dict + val allelespecific + val truthsensitivity + val mode + + output: + tuple val(meta), path("*.vcf.gz") , emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + refCommand = fasta ? "-R ${fasta} " : '' + alleleSpecificCommand = allelespecific ? '-AS' : '' + truthSensitivityCommand = truthsensitivity ? "--truth-sensitivity-filter-level ${truthsensitivity}" : '' + modeCommand = mode ? "--mode ${mode} " : 'SNP' + + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK ApplyVQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + gatk --java-options "-Xmx${avail_mem}g" ApplyVQSR \\ + ${refCommand} \\ + -V ${vcf} \\ + -O ${prefix}.vcf.gz \\ + ${alleleSpecificCommand} \\ + ${truthSensitivityCommand} \\ + --tranches-file $tranches \\ + --recal-file $recal \\ + ${modeCommand} \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/applyvqsr/meta.yml b/modules/gatk4/applyvqsr/meta.yml new file mode 100644 index 00000000..b757f3e9 --- /dev/null +++ b/modules/gatk4/applyvqsr/meta.yml @@ -0,0 +1,88 @@ +name: gatk4_applyvqsr +description: | + Apply a score cutoff to filter variants based on a recalibration table. + AplyVQSR performs the second pass in a two-stage process called Variant Quality Score Recalibration (VQSR). + Specifically, it applies filtering to the input variants based on the recalibration table produced + in the first step by VariantRecalibrator and a target sensitivity value. +keywords: + - gatk4 + - applyvqsr + - VQSR +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. 
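A hedged wiring sketch for the new module (params names and channel contents are placeholders, not part of this patch; the recal, recal index and tranches files are the stage-1 outputs of VariantRecalibrator, which is added as its own module later in this patch series):

    nextflow.enable.dsl = 2

    include { GATK4_APPLYVQSR } from './modules/gatk4/applyvqsr/main.nf'

    workflow {
        input = [
            [ id:'cohort' ],          // meta map
            file(params.vcf),         // VCF used in the stage-1 VariantRecalibrator run
            file(params.tbi),
            file(params.recal),       // stage-1 .recal table
            file(params.recal_idx),   // its .idx
            file(params.tranches)     // stage-1 .tranches file
        ]

        GATK4_APPLYVQSR (
            input,
            file(params.fasta), file(params.fai), file(params.dict),
            false,    // allelespecific -- keep the same as the stage-1 run
            '99.0',   // truth-sensitivity filter level
            'SNP'     // mode -- keep the same as the stage-1 run
        )
    }
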
+ homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + licence: ['Apache-2.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test'] + - vcf: + type: file + description: VCF file to be recalibrated, this should be the same file as used for the first stage VariantRecalibrator. + pattern: "*.vcf" + - tbi: + type: file + description: Tbi index for the input vcf file. + pattern: "*.vcf.tbi" + - recal: + type: file + description: Recalibration file produced when the input vcf was run through VariantRecalibrator in stage 1. + pattern: "*.recal" + - recalidx: + type: file + description: Index file for the recalibration file. + pattern: ".recal.idx" + - tranches: + type: boolean + description: Tranches file produced when the input vcf was run through VariantRecalibrator in stage 1. + pattern: ".tranches" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - allelespecific: + type: boolean + description: Whether or not to run ApplyVQSR in allele specific mode, this should be kept the same as the stage 1 VariantRecalibrator run. + pattern: "{true,false}" + - truthsensitivity: + type: double + description: Value to be used as the truth sensitivity cutoff score. + pattern: "99.0" + - mode: + type: String + description: Specifies which recalibration mode to employ, should be the same as the stage 1 VariantRecalibrator run. (SNP is default, BOTH is intended for testing only) + pattern: "{SNP,INDEL,BOTH}" + +output: + - vcf: + type: file + description: compressed vcf file containing the recalibrated variants. + pattern: "*.vcf.gz" + - tbi: + type: file + description: Index of recalibrated vcf file. + pattern: "*vcf.gz.tbi" + - versions: + type: file + description: File containing software versions. 
+ pattern: "versions.yml" + +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index cc4356a7..c261a481 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -504,6 +504,10 @@ gatk4/applybqsr: - modules/gatk4/applybqsr/** - tests/modules/gatk4/applybqsr/** +gatk4/applyvqsr: + - modules/gatk4/applyvqsr/** + - tests/modules/gatk4/applyvqsr/** + gatk4/baserecalibrator: - modules/gatk4/baserecalibrator/** - tests/modules/gatk4/baserecalibrator/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index b2edcb6f..fddc4489 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -115,6 +115,9 @@ params { transcriptome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/transcriptome.fasta" genome2_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome2.fasta" genome_chain_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.chain.gz" + genome_21_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.fasta" + genome_21_fasta_fai = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.fasta.fai" + genome_21_dict = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.dict" dbsnp_146_hg38_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz" dbsnp_146_hg38_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz.tbi" @@ -122,6 +125,14 @@ params { gnomad_r2_1_1_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz.tbi" mills_and_1000g_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz" mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi" + hapmap_3_3_hg38_21_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/hapmap_3.3.hg38.vcf.gz" + hapmap_3_3_hg38_21_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/hapmap_3.3.hg38.vcf.gz.tbi" + res_1000g_omni2_5_hg38_21_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/1000G_omni2.5.hg38.vcf.gz" + res_1000g_omni2_5_hg38_21_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/1000G_omni2.5.hg38.vcf.gz.tbi" + res_1000g_phase1_snps_hg38_21_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/1000G_phase1.snps.hg38.vcf.gz" + res_1000g_phase1_snps_hg38_21_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/1000G_phase1.snps.hg38.vcf.gz.tbi" + dbsnp_138_hg38_21_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/dbsnp_138.hg38.vcf.gz" + dbsnp_138_hg38_21_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/dbsnp_138.hg38.vcf.gz.tbi" syntheticvcf_short_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz" syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi" @@ -197,6 +208,16 @@ params { test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" test_pon_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_pon_genomicsdb.tar.gz" + test2_haplotc_ann_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/haplotypecaller_calls/test2_haplotc.ann.vcf.gz" + 
test2_haplotc_ann_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/haplotypecaller_calls/test2_haplotc.ann.vcf.gz.tbi" + + test2_recal = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/variantrecalibrator/test2.recal" + test2_recal_idx = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/variantrecalibrator/test2.recal.idx" + test2_tranches = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/variantrecalibrator/test2.tranches" + test2_allele_specific_recal = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/variantrecalibrator/test2_allele_specific.recal" + test2_allele_specific_recal_idx = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/variantrecalibrator/test2_allele_specific.recal.idx" + test2_allele_specific_tranches = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/variantrecalibrator/test2_allele_specific.tranches" + test_test2_paired_mutect2_calls_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz" test_test2_paired_mutect2_calls_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.tbi" test_test2_paired_mutect2_calls_vcf_gz_stats = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.stats" diff --git a/tests/modules/gatk4/applyvqsr/main.nf b/tests/modules/gatk4/applyvqsr/main.nf new file mode 100644 index 00000000..90a57aaa --- /dev/null +++ b/tests/modules/gatk4/applyvqsr/main.nf @@ -0,0 +1,41 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_APPLYVQSR } from '../../../../modules/gatk4/applyvqsr/main.nf' + +workflow test_gatk4_applyvqsr { + input = [ [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_recal'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_recal_idx'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_tranches'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) + allelespecific = false + truthsensitivity = '99.0' + mode = 'SNP' + + GATK4_APPLYVQSR ( input, fasta, fai, dict, allelespecific, truthsensitivity, mode ) +} + +workflow test_gatk4_applyvqsr_allele_specific { + input = [ [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_allele_specific_recal'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_allele_specific_recal_idx'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_allele_specific_tranches'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], 
checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) + allelespecific = true + truthsensitivity = '99.0' + mode = 'SNP' + + GATK4_APPLYVQSR ( input, fasta, fai, dict, allelespecific, truthsensitivity, mode ) +} diff --git a/tests/modules/gatk4/applyvqsr/nextflow.config b/tests/modules/gatk4/applyvqsr/nextflow.config new file mode 100644 index 00000000..19934e76 --- /dev/null +++ b/tests/modules/gatk4/applyvqsr/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/gatk4/applyvqsr/test.yml b/tests/modules/gatk4/applyvqsr/test.yml new file mode 100644 index 00000000..e870bed3 --- /dev/null +++ b/tests/modules/gatk4/applyvqsr/test.yml @@ -0,0 +1,17 @@ +- name: gatk4 applyvqsr test_gatk4_applyvqsr + command: nextflow run tests/modules/gatk4/applyvqsr -entry test_gatk4_applyvqsr -c tests/config/nextflow.config -c ./tests/modules/gatk4/applyvqsr/nextflow.config + tags: + - gatk4 + - gatk4/applyvqsr + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.tbi + +- name: gatk4 applyvqsr test_gatk4_applyvqsr_allele_specific + command: nextflow run tests/modules/gatk4/applyvqsr -entry test_gatk4_applyvqsr_allele_specific -c tests/config/nextflow.config -c ./tests/modules/gatk4/applyvqsr/nextflow.config + tags: + - gatk4 + - gatk4/applyvqsr + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.tbi From 54e0ac4ed95da110caccd4f31017ee449a5ea9ba Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Thu, 16 Dec 2021 10:54:49 +0000 Subject: [PATCH 309/314] add module: Variantrecalibrator (#1088) * committing to pull updated nf-core files * saving changes to checout other branch * committing progress so far, difficulty with test data * uploading to be used as draft PR * fix linting error in meta.yml * attempt to group reference inputs together * updated input format for resources * meta.yml updated with new resource names * added output channel for recal index * module only takes single vcf file input now * committing to checkout * update to new syntax, remove indel test for now * updated to use memory options and new test data * Update modules/gatk4/variantrecalibrator/main.nf Co-authored-by: FriederikeHanssen * Update main.nf * Update modules/gatk4/variantrecalibrator/main.nf Co-authored-by: FriederikeHanssen * remove duplicate test keys from test_data.config Co-authored-by: GCJMackenzie Co-authored-by: FriederikeHanssen --- modules/gatk4/variantrecalibrator/main.nf | 62 ++++++++++++ modules/gatk4/variantrecalibrator/meta.yml | 98 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 + .../modules/gatk4/variantrecalibrator/main.nf | 81 +++++++++++++++ .../gatk4/variantrecalibrator/nextflow.config | 5 + .../gatk4/variantrecalibrator/test.yml | 25 +++++ 6 files changed, 275 insertions(+) create mode 100644 modules/gatk4/variantrecalibrator/main.nf create mode 100644 modules/gatk4/variantrecalibrator/meta.yml create mode 100644 tests/modules/gatk4/variantrecalibrator/main.nf create mode 100644 tests/modules/gatk4/variantrecalibrator/nextflow.config create mode 100644 tests/modules/gatk4/variantrecalibrator/test.yml diff --git a/modules/gatk4/variantrecalibrator/main.nf b/modules/gatk4/variantrecalibrator/main.nf new file mode 100644 index 00000000..5641d6de --- /dev/null +++ 
b/modules/gatk4/variantrecalibrator/main.nf @@ -0,0 +1,62 @@ +process GATK4_VARIANTRECALIBRATOR { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" + + input: + tuple val(meta), path(vcf) , path(tbi) + path fasta + path fai + path dict + val allelespecific + tuple path(resvcfs), path(restbis), val(reslabels) + val annotation + val mode + val create_rscript + + output: + tuple val(meta), path("*.recal") , emit: recal + tuple val(meta), path("*.idx") , emit: idx + tuple val(meta), path("*.tranches"), emit: tranches + tuple val(meta), path("*plots.R") , emit: plots, optional:true + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + refCommand = fasta ? "-R ${fasta} " : '' + alleleSpecificCommand = allelespecific ? '-AS' : '' + resourceCommand = '--resource:' + reslabels.join( ' --resource:') + annotationCommand = '-an ' + annotation.join( ' -an ') + modeCommand = mode ? "--mode ${mode} " : 'SNP' + rscriptCommand = create_rscript ? "--rscript-file ${prefix}.plots.R" : '' + + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK VariantRecalibrator] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + gatk --java-options "-Xmx${avail_mem}g" VariantRecalibrator \\ + ${refCommand} \\ + -V ${vcf} \\ + ${alleleSpecificCommand} \\ + ${resourceCommand} \\ + ${annotationCommand} \\ + ${modeCommand} \\ + -O ${prefix}.recal \\ + --tranches-file ${prefix}.tranches \\ + ${rscriptCommand}\\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/variantrecalibrator/meta.yml b/modules/gatk4/variantrecalibrator/meta.yml new file mode 100644 index 00000000..92416a58 --- /dev/null +++ b/modules/gatk4/variantrecalibrator/meta.yml @@ -0,0 +1,98 @@ +name: gatk4_variantrecalibrator +description: | + Build a recalibration model to score variant quality for filtering purposes. + It is highly recommended to follow GATK best practices when using this module, + the gaussian mixture model requires a large number of samples to be used for the + tool to produce optimal results. For example, 30 samples for exome data. For more details see + https://gatk.broadinstitute.org/hc/en-us/articles/4402736812443-Which-training-sets-arguments-should-I-use-for-running-VQSR- +keywords: + - VariantRecalibrator + - gatk4 + - recalibration_model +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - vcf: + type: file + description: input vcf file containing the variants to be recalibrated + pattern: "*.vcf.gz" + - tbi: + type: file + description: tbi file matching with -vcf + pattern: "*.vcf.gz.tbi" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - allelespecific: + type: boolean + description: specify whether to use allele specific annotations + pattern: "{true,false}" + - resvcfs: + type: list + description: resource files to be used as truth, training and known sites resources, this imports the files into the module, file names are specified again in the resource_labels to be called via the command. + pattern: '*/hapmap_3.3.hg38_chr21.vcf.gz' + - restbis: + type: list + description: tbis for the corresponding vcfs files to be used as truth, training and known resources. + pattern: '*/hapmap_3.3.hg38_chr21.vcf.gz.tbi' + - reslabels: + type: list + description: labels for the resource files to be used as truth, training and known sites resources, label should include an identifier,which kind of resource(s) it is, prior value and name of the file. + pattern: "hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38_chr21.vcf.gz" + - annotation: + type: list + description: specify which annotations should be used for calculations. + pattern: "['QD', 'MQ', 'FS', 'SOR']" + - mode: + type: string + description: specifies which recalibration mode to employ (SNP is default, BOTH is intended for testing only) + pattern: "{SNP,INDEL,BOTH}" + - rscript: + type: boolean + description: specify whether to generate rscript.plot output file + pattern: "{true,false}" +output: + - recal: + type: file + description: Output recal file used by ApplyVQSR + pattern: "*.recal" + - idx: + type: file + description: Index file for the recal output file + pattern: "*.idx" + - tranches: + type: file + description: Output tranches file used by ApplyVQSR + pattern: "*.tranches" + - plots: + type: file + description: Optional output rscript file to aid in visualization of the input data and learned model. 
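To make the resource inputs concrete: per the meta.yml, resvcfs and restbis only stage the files into the module, while reslabels carries the literal resource arguments (including the file names). A small Groovy illustration of how the module renders them, mirroring the resourceCommand line in main.nf (only two of the four test resources shown, for brevity):

    reslabels = [
        'hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38.vcf.gz',
        'dbsnp,known=true,training=false,truth=false,prior=2.0 dbsnp_138.hg38.vcf.gz'
    ]
    resourceCommand = '--resource:' + reslabels.join(' --resource:')
    assert resourceCommand ==
        '--resource:hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38.vcf.gz ' +
        '--resource:dbsnp,known=true,training=false,truth=false,prior=2.0 dbsnp_138.hg38.vcf.gz'

The emitted recal, idx and tranches channels are then what the GATK4_APPLYVQSR module from the previous patch consumes for stage 2 of VQSR.
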
+ pattern: "*plots.R" + - version: + type: file + description: File containing software versions + pattern: "*.versions.yml" +authors: + - "@GCJMackenzie" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index c261a481..7e3d8f82 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -604,6 +604,10 @@ gatk4/variantfiltration: - modules/gatk4/variantfiltration/** - tests/modules/gatk4/variantfiltration/** +gatk4/variantrecalibrator: + - modules/gatk4/variantrecalibrator/** + - tests/modules/gatk4/variantrecalibrator/** + genmap/index: - modules/genmap/index/** - tests/modules/genmap/index/** diff --git a/tests/modules/gatk4/variantrecalibrator/main.nf b/tests/modules/gatk4/variantrecalibrator/main.nf new file mode 100644 index 00000000..bbc1dff5 --- /dev/null +++ b/tests/modules/gatk4/variantrecalibrator/main.nf @@ -0,0 +1,81 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_VARIANTRECALIBRATOR } from '../../../../modules/gatk4/variantrecalibrator/main.nf' + +workflow test_gatk4_variantrecalibrator { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz_tbi'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) + allelespecific = false + resources = [ + [ + file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + ], + [ + file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + ], + [ + 'hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38.vcf.gz', + 'omni,known=false,training=true,truth=false,prior=12.0 1000G_omni2.5.hg38.vcf.gz', + '1000G,known=false,training=true,truth=false,prior=10.0 1000G_phase1.snps.hg38.vcf.gz', + 'dbsnp,known=true,training=false,truth=false,prior=2.0 dbsnp_138.hg38.vcf.gz' + ] + ] + annotation = ['QD', 'MQ', 'FS', 'SOR'] + mode = 'SNP' + create_rscript = false + + GATK4_VARIANTRECALIBRATOR ( input, fasta, fai, dict, allelespecific, resources, annotation, mode, create_rscript) +} + +workflow test_gatk4_variantrecalibrator_allele_specific { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz_tbi'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = 
file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) + allelespecific = true + resources = [ + [ + file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + ], + [ + file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + ], + [ + 'hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38.vcf.gz', + 'omni,known=false,training=true,truth=false,prior=12.0 1000G_omni2.5.hg38.vcf.gz', + '1000G,known=false,training=true,truth=false,prior=10.0 1000G_phase1.snps.hg38.vcf.gz', + 'dbsnp,known=true,training=false,truth=false,prior=2.0 dbsnp_138.hg38.vcf.gz' + ] + ] + annotation = ['QD', 'MQ', 'FS'] + mode = 'SNP' + create_rscript = false + + GATK4_VARIANTRECALIBRATOR ( input, fasta, fai, dict, allelespecific, resources, annotation, mode, create_rscript) +} diff --git a/tests/modules/gatk4/variantrecalibrator/nextflow.config b/tests/modules/gatk4/variantrecalibrator/nextflow.config new file mode 100644 index 00000000..19934e76 --- /dev/null +++ b/tests/modules/gatk4/variantrecalibrator/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/gatk4/variantrecalibrator/test.yml b/tests/modules/gatk4/variantrecalibrator/test.yml new file mode 100644 index 00000000..42b18e36 --- /dev/null +++ b/tests/modules/gatk4/variantrecalibrator/test.yml @@ -0,0 +1,25 @@ +- name: gatk4 variantrecalibrator test_gatk4_variantrecalibrator + command: nextflow run tests/modules/gatk4/variantrecalibrator -entry test_gatk4_variantrecalibrator -c tests/config/nextflow.config -c ./tests/modules/gatk4/variantrecalibrator/nextflow.config + tags: + - gatk4 + - gatk4/variantrecalibrator + files: + - path: output/gatk4/test.recal + contains: + - "#CHROM POS ID REF ALT QUAL FILTER INFO" + - path: output/gatk4/test.recal.idx + - path: output/gatk4/test.tranches + md5sum: d238e97bf996863969dac7751e345549 + +- name: gatk4 variantrecalibrator test_gatk4_variantrecalibrator_allele_specific + command: nextflow run tests/modules/gatk4/variantrecalibrator -entry test_gatk4_variantrecalibrator_allele_specific -c tests/config/nextflow.config -c ./tests/modules/gatk4/variantrecalibrator/nextflow.config + tags: + - gatk4 + - gatk4/variantrecalibrator + files: + - path: output/gatk4/test.recal + contains: + - "#CHROM POS ID REF ALT QUAL FILTER INFO" + - path: output/gatk4/test.recal.idx + - path: output/gatk4/test.tranches + md5sum: 444438d46716593634a6817958099292 From 0d1e21686a586447b7592e40da9b3a7cdeedf03c Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Thu, 16 Dec 2021 10:56:41 +0000 Subject: [PATCH 
310/314] Bump picard version from 2.25.7 to 2.26.7 for log4j vulnerability (#1173) * Bump picard version from 2.25.7 to 2.26.7 for log4j vulnerability * Fix EC lint for unrelated hmmcopy module --- modules/picard/collecthsmetrics/main.nf | 6 +++--- modules/picard/collectmultiplemetrics/main.nf | 6 +++--- modules/picard/collectwgsmetrics/main.nf | 6 +++--- modules/picard/filtersamreads/main.nf | 6 +++--- modules/picard/markduplicates/main.nf | 6 +++--- modules/picard/mergesamfiles/main.nf | 6 +++--- modules/picard/sortsam/main.nf | 6 +++--- 7 files changed, 21 insertions(+), 21 deletions(-) diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index 3705b8fb..9dabc3aa 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -2,10 +2,10 @@ process PICARD_COLLECTHSMETRICS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.2" : null) + conda (params.enable_conda ? "bioconda::picard=2.26.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.2--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.2--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.26.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.7--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index 6b292534..481761e3 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -2,10 +2,10 @@ process PICARD_COLLECTMULTIPLEMETRICS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) + conda (params.enable_conda ? "bioconda::picard=2.26.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.26.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.7--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index eddb4604..361ca1b5 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -2,10 +2,10 @@ process PICARD_COLLECTWGSMETRICS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) + conda (params.enable_conda ? "bioconda::picard=2.26.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.26.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.7--hdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index d8de137b..70bd2fa7 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -2,10 +2,10 @@ process PICARD_FILTERSAMREADS { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) + conda (params.enable_conda ? 
"bioconda::picard=2.26.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.26.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.7--hdfd78af_0' }" input: tuple val(meta), path(bam), path(readlist) diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index d3bf6938..3087bff4 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -2,10 +2,10 @@ process PICARD_MARKDUPLICATES { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) + conda (params.enable_conda ? "bioconda::picard=2.26.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.26.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.7--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index 86796593..952d6331 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -2,10 +2,10 @@ process PICARD_MERGESAMFILES { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) + conda (params.enable_conda ? "bioconda::picard=2.26.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.26.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.7--hdfd78af_0' }" input: tuple val(meta), path(bams) diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index eb3caf40..7728484c 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -2,10 +2,10 @@ process PICARD_SORTSAM { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) + conda (params.enable_conda ? "bioconda::picard=2.26.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.26.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.7--hdfd78af_0' }" input: tuple val(meta), path(bam) From 3eff67f54da90901425f18dc11a447b922b09cee Mon Sep 17 00:00:00 2001 From: SusiJo <43847534+SusiJo@users.noreply.github.com> Date: Fri, 17 Dec 2021 09:08:14 +0100 Subject: [PATCH 311/314] Added new module snpsift/split (#1031) * added new module snpsift/split * added options.args * added .vcf.gz to input * removed test and updated to new NF DSL2 syntax * Updated to new NF DSL2 syntax * added option to join vcf files Co-authored-by: Robert A. 
Petit III Co-authored-by: FriederikeHanssen --- modules/snpsift/split/main.nf | 48 +++++++++++++++++++++ modules/snpsift/split/meta.yml | 44 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 2 + tests/modules/snpsift/split/main.nf | 32 ++++++++++++++ tests/modules/snpsift/split/nextflow.config | 5 +++ tests/modules/snpsift/split/test.yml | 32 ++++++++++++++ 7 files changed, 167 insertions(+) create mode 100644 modules/snpsift/split/main.nf create mode 100644 modules/snpsift/split/meta.yml create mode 100644 tests/modules/snpsift/split/main.nf create mode 100644 tests/modules/snpsift/split/nextflow.config create mode 100644 tests/modules/snpsift/split/test.yml diff --git a/modules/snpsift/split/main.nf b/modules/snpsift/split/main.nf new file mode 100644 index 00000000..a83052ad --- /dev/null +++ b/modules/snpsift/split/main.nf @@ -0,0 +1,48 @@ +process SNPSIFT_SPLIT { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::snpsift=4.3.1t" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snpsift:4.3.1t--hdfd78af_3' : + 'quay.io/biocontainers/snpsift:4.3.1t--hdfd78af_3' }" + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.vcf"), emit: out_vcfs + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if (meta.split) { + """ + SnpSift \\ + split \\ + $args \\ + $vcf + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + snpsift: \$( echo \$(SnpSift split -h 2>&1) | sed 's/^.*version //' | sed 's/(.*//' | sed 's/t//g' ) + END_VERSIONS + """ + } else { + """ + SnpSift \\ + split \\ + -j \\ + $args \\ + $vcf \\ + > ${prefix}.joined.vcf + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + snpsift: \$( echo \$(SnpSift split -h 2>&1) | sed 's/^.*version //' | sed 's/(.*//' | sed 's/t//g' ) + END_VERSIONS + """ + } + +} diff --git a/modules/snpsift/split/meta.yml b/modules/snpsift/split/meta.yml new file mode 100644 index 00000000..5a125b62 --- /dev/null +++ b/modules/snpsift/split/meta.yml @@ -0,0 +1,44 @@ +name: snpsift_split +description: Splits/Joins VCF(s) file into chromosomes +keywords: + - split + - join + - vcf +tools: + - snpsift: + description: SnpSift is a toolbox that allows you to filter and manipulate annotated files + homepage: https://pcingola.github.io/SnpEff/ss_introduction/ + documentation: https://pcingola.github.io/SnpEff/ss_introduction/ + tool_dev_url: https://github.com/pcingola/SnpEff + doi: "10.3389/fgene.2012.00035" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file (split) or list of files (join) + description: VCF file(s) + pattern: "*.{vcf,vcf.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - out_vcfs: + type: file + description: Split/Joined VCF file(s) + pattern: "*.vcf" + +authors: + - "@SusiJo" + - "@jonasscheid" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7e3d8f82..708b5a27 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1337,6 +1337,10 @@ snpeff: - modules/snpeff/** - tests/modules/snpeff/** +snpsift/split: + - modules/snpsift/split/** + - tests/modules/snpsift/split/** + snpsites: - modules/snpsites/** - tests/modules/snpsites/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index fddc4489..ee1ba0d8 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -250,6 +250,8 @@ params { cutandrun_bedgraph_test_1 = "${test_data_dir}/genomics/homo_sapiens/illumina/bedgraph/cutandtag_h3k27me3_test_1.bedGraph" cutandrun_bedgraph_test_2 = "${test_data_dir}/genomics/homo_sapiens/illumina/bedgraph/cutandtag_igg_test_1.bedGraph" + + test_rnaseq_vcf = "${test_data_dir}/genomics/homo_sapiens/illumina/vcf/test.rnaseq.vcf" } 'pacbio' { primers = "${test_data_dir}/genomics/homo_sapiens/pacbio/fasta/primers.fasta" diff --git a/tests/modules/snpsift/split/main.nf b/tests/modules/snpsift/split/main.nf new file mode 100644 index 00000000..4579fee3 --- /dev/null +++ b/tests/modules/snpsift/split/main.nf @@ -0,0 +1,32 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SNPSIFT_SPLIT } from '../../../../modules/snpsift/split/main.nf' + +workflow test_snpsift_split_base { + + input = [ [ id:'test', split:true], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_vcf'], checkIfExists: true) ] + + SNPSIFT_SPLIT ( input ) +} + +workflow test_snpsift_split_gz { + + input = [ [ id:'test', split:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ] + + SNPSIFT_SPLIT ( input ) +} + +workflow test_snpsift_join { + + input = [ [ id:'test', split:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_vcf'], checkIfExists: true) ] + ] + + SNPSIFT_SPLIT ( input ) + +} diff --git a/tests/modules/snpsift/split/nextflow.config b/tests/modules/snpsift/split/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/snpsift/split/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/snpsift/split/test.yml b/tests/modules/snpsift/split/test.yml new file mode 100644 index 00000000..529fbca5 --- /dev/null +++ b/tests/modules/snpsift/split/test.yml @@ -0,0 +1,32 @@ +- name: snpsift split test_snpsift_split_base + command: nextflow run tests/modules/snpsift/split -entry test_snpsift_split_base -c tests/config/nextflow.config + tags: + - snpsift/split + - snpsift + files: + - path: output/snpsift/test.rnaseq.chr22.vcf + md5sum: 1bb8724dcbe6fa3101a814c0be51d1ea + - path: output/snpsift/versions.yml + md5sum: 2b9f6b788db6c4fcbf258db763d8fab7 + +- name: snpsift split test_snpsift_split_gz + command: nextflow run tests/modules/snpsift/split -entry test_snpsift_split_gz -c tests/config/nextflow.config + tags: + - snpsift/split + - snpsift + files: + - path: output/snpsift/test.MT192765.1.vcf + md5sum: 
9d491cfa84067450342ba8e66c75e5b8 + - path: output/snpsift/versions.yml + md5sum: 6bd63376670d6c1445caea2f31a3f579 + +- name: snpsift split test_snpsift_join + command: nextflow run tests/modules/snpsift/split -entry test_snpsift_join -c tests/config/nextflow.config + tags: + - snpsift/split + - snpsift + files: + - path: output/snpsift/test.joined.vcf + md5sum: c400c7458524d889e0967b06ed72534f + - path: output/snpsift/versions.yml + md5sum: be54682a73d3b91a17eacc0e533448f5 From 754701fb3ac69ddf676062a0091cf2160f5094eb Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Fri, 17 Dec 2021 10:32:33 +0100 Subject: [PATCH 312/314] Add Gitpod environment to modules to allow pytest-workflow testing. (#1151) * Add Gitpod Env * Add Gitpod Env * Change Gitpod to install only Nextflow and pytest-workflow Co-authored-by: Harshil Patel --- .gitpod.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .gitpod.yml diff --git a/.gitpod.yml b/.gitpod.yml new file mode 100644 index 00000000..6fe79af6 --- /dev/null +++ b/.gitpod.yml @@ -0,0 +1,10 @@ +# List the start up tasks. Learn more https://www.gitpod.io/docs/config-start-tasks/ +tasks: + - name: Install Nextflow + init: | + curl -s https://get.nextflow.io | bash + sudo mv nextflow /usr/local/bin + + - name: Install pytest-workflow + init: | + pip install pytest-workflow From ce8c781bb494c2cc1f0a951c31c7b2f4af12e8af Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 17 Dec 2021 10:00:09 +0000 Subject: [PATCH 313/314] Provide BAI when running rseqc/tin (#1177) * Provide BAI when running rseqc/tin * Add md5sum * Add proper md5sums * md5sum not md5 * Add xls to meta.yml --- modules/rseqc/tin/main.nf | 6 +++--- modules/rseqc/tin/meta.yml | 12 ++++++++++-- tests/modules/rseqc/tin/main.nf | 3 ++- tests/modules/rseqc/tin/test.yml | 5 ++++- 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/modules/rseqc/tin/main.nf b/modules/rseqc/tin/main.nf index 4c449973..b7bff0f3 100644 --- a/modules/rseqc/tin/main.nf +++ b/modules/rseqc/tin/main.nf @@ -8,11 +8,12 @@ process RSEQC_TIN { 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: - tuple val(meta), path(bam) + tuple val(meta), path(bam), path(bai) path bed output: tuple val(meta), path("*.txt"), emit: txt + tuple val(meta), path("*.xls"), emit: xls path "versions.yml" , emit: versions script: @@ -22,8 +23,7 @@ process RSEQC_TIN { tin.py \\ -i $bam \\ -r $bed \\ - $args \\ - > ${prefix}.tin.txt + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/rseqc/tin/meta.yml b/modules/rseqc/tin/meta.yml index 2c711f30..158b4033 100644 --- a/modules/rseqc/tin/meta.yml +++ b/modules/rseqc/tin/meta.yml @@ -23,6 +23,10 @@ input: type: file description: Input BAM file pattern: "*.{bam}" + - bai: + type: file + description: Index for input BAM file + pattern: "*.{bai}" - bed: type: file description: BED file containing the reference gene model @@ -30,8 +34,12 @@ input: output: - txt: type: file - description: tin.py results file - pattern: "*.tin.txt" + description: TXT file containing tin.py results summary + pattern: "*.txt" + - xls: + type: file + description: XLS file containing tin.py results + pattern: "*.xls" - versions: type: file description: File containing software versions diff --git a/tests/modules/rseqc/tin/main.nf b/tests/modules/rseqc/tin/main.nf index 9b987a8e..677e1165 100644 --- a/tests/modules/rseqc/tin/main.nf +++ b/tests/modules/rseqc/tin/main.nf @@ -8,7 +8,8 @@ workflow test_rseqc_tin { input = [ [ id:'test' ], // meta map - 
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] bed = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) diff --git a/tests/modules/rseqc/tin/test.yml b/tests/modules/rseqc/tin/test.yml index f99ce7cc..9faae202 100644 --- a/tests/modules/rseqc/tin/test.yml +++ b/tests/modules/rseqc/tin/test.yml @@ -4,4 +4,7 @@ - rseqc - rseqc/tin files: - - path: output/rseqc/test.tin.txt + - path: output/rseqc/test.paired_end.sorted.summary.txt + md5sum: 9d98447e178b89a89f6f5aba7a772fe6 + - path: output/rseqc/test.paired_end.sorted.tin.xls + md5sum: 6b1b1b0dc1dc265342ba8c3f27fa60e6 From 02218ab5a05a643c869a670e933ee5f6fa6e1d43 Mon Sep 17 00:00:00 2001 From: Simon Pearce <24893913+SPPearce@users.noreply.github.com> Date: Fri, 17 Dec 2021 15:39:50 +0000 Subject: [PATCH 314/314] hmmcopy/mapCounter (#1175) * hmmcopy/mapCounter * update test * Remove bam tag * Remove /tmp/ path from test.yml * Update modules/hmmcopy/mapcounter/meta.yml Incorporate formatting changes Co-authored-by: James A. Fellows Yates * Update modules/hmmcopy/mapcounter/meta.yml Co-authored-by: James A. Fellows Yates * Update tests/modules/hmmcopy/mapcounter/main.nf Co-authored-by: James A. Fellows Yates Co-authored-by: Simon Pearce Co-authored-by: James A. Fellows Yates --- modules/hmmcopy/mapcounter/main.nf | 31 +++++++++++++++++ modules/hmmcopy/mapcounter/meta.yml | 34 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 +++ tests/modules/hmmcopy/mapcounter/main.nf | 14 ++++++++ .../hmmcopy/mapcounter/nextflow.config | 5 +++ tests/modules/hmmcopy/mapcounter/test.yml | 12 +++++++ 6 files changed, 100 insertions(+) create mode 100644 modules/hmmcopy/mapcounter/main.nf create mode 100644 modules/hmmcopy/mapcounter/meta.yml create mode 100644 tests/modules/hmmcopy/mapcounter/main.nf create mode 100644 tests/modules/hmmcopy/mapcounter/nextflow.config create mode 100644 tests/modules/hmmcopy/mapcounter/test.yml diff --git a/modules/hmmcopy/mapcounter/main.nf b/modules/hmmcopy/mapcounter/main.nf new file mode 100644 index 00000000..ab20868e --- /dev/null +++ b/modules/hmmcopy/mapcounter/main.nf @@ -0,0 +1,31 @@ +def VERSION = '0.1.1' // Version information not provided by tool on CLI + +process HMMCOPY_MAPCOUNTER { + label 'process_medium' + + conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_7': + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_7' }" + + input: + path bigwig + + output: + path "*.map.wig" , emit: wig + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + + """ + mapCounter \\ + $args \\ + $bigwig > ${bigwig.baseName}.map.wig + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + hmmcopy: \$(echo $VERSION) + END_VERSIONS + """ +} diff --git a/modules/hmmcopy/mapcounter/meta.yml b/modules/hmmcopy/mapcounter/meta.yml new file mode 100644 index 00000000..8f8b9aae --- /dev/null +++ b/modules/hmmcopy/mapcounter/meta.yml @@ -0,0 +1,34 @@ +name: hmmcopy_mapcounter +description: mapCounter function from HMMcopy utilities, used to generate mappability in non-overlapping windows from a bigwig file +keywords: + - hmmcopy + - mapcounter + - cnv +tools: + - hmmcopy: + description: C++ based programs for analyzing BAM files and preparing read counts -- used with bioconductor-hmmcopy + homepage: https://github.com/shahcompbio/hmmcopy_utils + documentation: https://github.com/shahcompbio/hmmcopy_utils + tool_dev_url: https://github.com/shahcompbio/hmmcopy_utils + doi: "" + licence: ['GPL v3'] + +input: + - bigwig: + type: file + description: BigWig file with the mappability score of the genome, for instance made with generateMap function. + pattern: "*.wig" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + + - wig: + type: file + description: wig file containing mappability of each window of the genome + pattern: "*.map.wig" + +authors: + - "@sppearce" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 708b5a27..9ebaa99a 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -695,6 +695,10 @@ hmmcopy/generatemap: - modules/hmmcopy/generatemap/** - tests/modules/hmmcopy/generatemap/** +hmmcopy/mapcounter: + - modules/hmmcopy/mapcounter/** + - tests/modules/hmmcopy/mapcounter/** + hmmcopy/readcounter: - modules/hmmcopy/readcounter/** - tests/modules/hmmcopy/readcounter/** diff --git a/tests/modules/hmmcopy/mapcounter/main.nf b/tests/modules/hmmcopy/mapcounter/main.nf new file mode 100644 index 00000000..c364f0f7 --- /dev/null +++ b/tests/modules/hmmcopy/mapcounter/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HMMCOPY_MAPCOUNTER } from '../../../../modules/hmmcopy/mapcounter/main.nf' +include { HMMCOPY_GENERATEMAP } from '../../../../modules/hmmcopy/generatemap/main.nf' +workflow test_hmmcopy_mapcounter { + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + HMMCOPY_GENERATEMAP( fasta ) + + HMMCOPY_MAPCOUNTER ( HMMCOPY_GENERATEMAP.out.bigwig ) +} diff --git a/tests/modules/hmmcopy/mapcounter/nextflow.config b/tests/modules/hmmcopy/mapcounter/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/hmmcopy/mapcounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/hmmcopy/mapcounter/test.yml b/tests/modules/hmmcopy/mapcounter/test.yml new file mode 100644 index 00000000..204800a1 --- /dev/null +++ b/tests/modules/hmmcopy/mapcounter/test.yml @@ -0,0 +1,12 @@ +- name: hmmcopy mapcounter test_hmmcopy_mapcounter + command: nextflow run tests/modules/hmmcopy/mapcounter 
-entry test_hmmcopy_mapcounter -c tests/config/nextflow.config + tags: + - hmmcopy/mapcounter + - hmmcopy + files: + - path: output/hmmcopy/genome.fasta.map.bw + md5sum: 7ad68224a1e40287978284c387e8eb70 + - path: output/hmmcopy/genome.fasta.map.map.wig + md5sum: e2d39dc204ed31c1ce372d633a42560f + - path: output/hmmcopy/versions.yml + md5sum: 8361e3c0f8b96cf84834678cf988a209
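
Usage note for downstream pipelines: the rseqc/tin change in patch 313 alters the module interface, so callers must now supply the BAM index alongside the BAM in a single input tuple. Below is a minimal sketch of how a consuming workflow might adapt. It is illustration only, not part of any patch above: the include paths, the params.bam / params.bed inputs, the sample id, and the assumption that the samtools/index module emits a `bai` channel are all hypothetical placeholders to be adjusted to the consuming pipeline.

#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

// Hypothetical include paths -- adjust to the consuming pipeline's layout.
include { SAMTOOLS_INDEX } from './modules/samtools/index/main'
include { RSEQC_TIN      } from './modules/rseqc/tin/main'

workflow {
    // [ meta, bam ] tuples produced upstream; here fed from an assumed params.bam.
    ch_bam = Channel.of(
        [ [ id:'sample1' ], file(params.bam, checkIfExists: true) ]
    )

    // Index the BAM (assumes the samtools/index module exposes a 'bai' emit).
    SAMTOOLS_INDEX ( ch_bam )

    // Join BAM and BAI on the meta map so RSEQC_TIN receives [ meta, bam, bai ],
    // matching the updated input declaration introduced in patch 313.
    ch_bam_bai = ch_bam.join( SAMTOOLS_INDEX.out.bai )

    RSEQC_TIN ( ch_bam_bai, file(params.bed, checkIfExists: true) )
}

The same join-on-meta pattern applies wherever a module's input tuple gains an index file, since it keeps the BAM and its BAI paired per sample without relying on channel ordering.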